Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void recalculatePermissions() {
clearPermissions();
Set<Permission> defaults = Bukkit.getServer().getPluginManager().getDefaultPermissions(isOp());
Bukkit.getServer().getPluginManager().subscribeToDefaultPerms(isOp(), parent);
for (Permission perm : defaults) {
String name = perm.getName().toLowerCase();
permissions.put(name, new PermissionAttachmentInfo(parent, name, null, true));
Bukkit.getServer().getPluginManager().subscribeToPermission(name, parent);
calculateChildPermissions(perm.getChildren(), false, null);
}
for (PermissionAttachment attachment : attachments) {
calculateChildPermissions(attachment.getPermissions(), false, attachment);
}
}
|
#vulnerable code
public void recalculatePermissions() {
dirtyPermissions = true;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
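The patch replaces a lazily set dirty flag with eager recalculation. A minimal standalone sketch (a hypothetical class, not the Bukkit source) of why the unsynchronized flag was racy: a plain boolean write has no happens-before edge to readers on other threads, so they may never observe it. Marking it volatile fixes visibility; the actual patch sidesteps the flag entirely.

public class DirtyFlagExample {
    // Without volatile, a reader thread may never see the writer's update --
    // the visibility hazard behind the THREAD_SAFETY_VIOLATION report.
    private volatile boolean dirty;

    public void invalidate() {
        dirty = true; // volatile write: visible to subsequent volatile reads
    }

    public void recalculateIfDirty() {
        if (dirty) {       // volatile read; note the check-then-act pair below
            dirty = false; // would still need locking to be fully atomic --
            // the patch avoids this entirely by recalculating eagerly
        }
    }
}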
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void test() {
JedisPoolConfig config = new JedisPoolConfig();
// set up idle connections
config.setMaxIdle(20);
config.setMaxWaitMillis(1000);
// JedisPool pool = new JedisPool(config, "27.126.180.210", 6379);
// System.out.println(pool.getResource());
// Jedis jedis = pool.getResource();
// jedis.set("name", "陈杰");
// System.out.println(jedis.get("name"));
}
|
#vulnerable code
@Test
public void test() {
JedisPoolConfig config = new JedisPoolConfig();
// set up idle connections
config.setMaxIdle(20);
config.setMaxWaitMillis(1000);
JedisPool pool = new JedisPool(config, "27.126.180.210", 6379);
System.out.println(pool.getResource());
Jedis jedis = pool.getResource();
jedis.set("name", "陈杰");
System.out.println(jedis.get("name"));
}
#location 12
#vulnerability type RESOURCE_LEAK
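The patched test avoids the leak by commenting the pool usage out entirely. A sketch that keeps the Redis round-trip without leaking, assuming a Jedis version (2.6+) where Jedis and JedisPool implement Closeable; the host and values here are placeholders:

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

public class JedisPoolNoLeak {
    public static void main(String[] args) {
        JedisPoolConfig config = new JedisPoolConfig();
        config.setMaxIdle(20);
        config.setMaxWaitMillis(1000);
        // try-with-resources returns the connection to the pool and then closes
        // the pool itself, on both the normal and the exceptional path
        try (JedisPool pool = new JedisPool(config, "localhost", 6379);
             Jedis jedis = pool.getResource()) {
            jedis.set("name", "value");
            System.out.println(jedis.get("name"));
        }
    }
}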
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public File saveAs(String path) throws IOException, InterruptedException {
byte[] pdf = this.getPDF();
File file = new File(path);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file));
bufferedOutputStream.write(pdf);
bufferedOutputStream.flush();
bufferedOutputStream.close();
return file;
}
|
#vulnerable code
public File saveAs(String path) throws IOException, InterruptedException {
Runtime rt = Runtime.getRuntime();
String command = this.commandWithParameters() + Symbol.separator + path;
Process proc = rt.exec(command);
if(htmlFromString) {
OutputStream stdin = proc.getOutputStream();
stdin.write(htmlInput.getBytes());
stdin.close();
}
proc.waitFor();
if(proc.exitValue() != 0) {
throw new RuntimeException("Process (" + command + ") exited with status code " + proc.exitValue());
}
return new File(path);
}
#location 16
#vulnerability type RESOURCE_LEAK
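Note that the fixed saveAs above still leaks the FileOutputStream if write() throws. A small standalone helper showing the try-with-resources shape it could delegate to (a sketch, not the project's code):

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

final class SafeWrite {
    // The stream is closed on every path; close() also flushes buffered bytes.
    static File writeBytes(byte[] pdf, String path) throws IOException {
        File file = new File(path);
        try (BufferedOutputStream out =
                 new BufferedOutputStream(new FileOutputStream(file))) {
            out.write(pdf);
        }
        return file;
    }
}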
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public byte[] getPDF() throws IOException, InterruptedException {
return getPDF(STDOUT);
}
|
#vulnerable code
public byte[] getPDF() throws IOException, InterruptedException {
Runtime runtime = Runtime.getRuntime();
if(htmlFromString && !this.params.contains(new Param("-"))) {
this.addParam(new Param("-"));
}
String command = this.commandWithParameters() + Symbol.separator + "-";
Process process = runtime.exec(command);
if(htmlFromString) {
OutputStream stdInStream = process.getOutputStream();
stdInStream.write(htmlInput.getBytes());
stdInStream.close();
}
InputStream stdOutStream = process.getInputStream();
InputStream stdErrStream = process.getErrorStream();
process.waitFor();
ByteArrayOutputStream stdOut = new ByteArrayOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
for(int i = 0; i < stdOutStream.available(); i++) {
stdOut.write((char) stdOutStream.read());
}
stdOutStream.close();
for(int i = 0; i < stdErrStream.available(); i++) {
stdErr.write((char) stdErrStream.read());
}
stdErrStream.close();
if(process.exitValue() != 0) {
throw new RuntimeException("Process (" + command + ") exited with status code " + process.exitValue() + ":\n"+new String(stdErr.toByteArray()));
}
return stdOut.toByteArray();
}
#location 33
#vulnerability type RESOURCE_LEAK
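Beyond delegating to getPDF(STDOUT), the vulnerable version also drained the process streams with available(), which reports only the bytes currently buffered and can silently truncate the output. A hypothetical helper (not part of the original class) that drains a stream to true end-of-stream and always closes it:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

final class StreamUtil {
    // Reads until read() returns -1 (end of stream) rather than trusting available().
    static byte[] readFully(InputStream in) throws IOException {
        try (InputStream stream = in) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[8192];
            int n;
            while ((n = stream.read(buffer)) != -1) {
                out.write(buffer, 0, n);
            }
            return out.toByteArray();
        }
    }
}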
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public File saveAs(String path) throws IOException, InterruptedException {
byte[] pdf = this.getPDF();
File file = new File(path);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file));
bufferedOutputStream.write(pdf);
bufferedOutputStream.flush();
bufferedOutputStream.close();
return file;
}
|
#vulnerable code
public File saveAs(String path) throws IOException, InterruptedException {
Runtime rt = Runtime.getRuntime();
String command = this.commandWithParameters() + Symbol.separator + path;
Process proc = rt.exec(command);
if(htmlFromString) {
OutputStream stdin = proc.getOutputStream();
stdin.write(htmlInput.getBytes());
stdin.close();
}
proc.waitFor();
if(proc.exitValue() != 0) {
throw new RuntimeException("Process (" + command + ") exited with status code " + proc.exitValue());
}
return new File(path);
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public byte[] getPDF() throws IOException, InterruptedException {
return getPDF(STDOUT);
}
|
#vulnerable code
public byte[] getPDF() throws IOException, InterruptedException {
Runtime runtime = Runtime.getRuntime();
if(htmlFromString && !this.params.contains(new Param("-"))) {
this.addParam(new Param("-"));
}
String command = this.commandWithParameters() + Symbol.separator + "-";
Process process = runtime.exec(command);
if(htmlFromString) {
OutputStream stdInStream = process.getOutputStream();
stdInStream.write(htmlInput.getBytes());
stdInStream.close();
}
InputStream stdOutStream = process.getInputStream();
InputStream stdErrStream = process.getErrorStream();
process.waitFor();
ByteArrayOutputStream stdOut = new ByteArrayOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
for(int i = 0; i < stdOutStream.available(); i++) {
stdOut.write((char) stdOutStream.read());
}
stdOutStream.close();
for(int i = 0; i < stdErrStream.available(); i++) {
stdErr.write((char) stdErrStream.read());
}
stdErrStream.close();
if(process.exitValue() != 0) {
throw new RuntimeException("Process (" + command + ") exited with status code " + process.exitValue() + ":\n"+new String(stdErr.toByteArray()));
}
return stdOut.toByteArray();
}
#location 30
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testComplexCoreFields()
{
User carter = DataGenerator.carter();
carter.getFavoriteArtists().add( DataGenerator.neutralMilkHotel() );
carter.getFavoriteArtists().add( DataGenerator.arcadeFire() );
ResultTraverser traverser = new ResultTraverser();
traverser.getFieldPopulatorRegistry().register( new UserFieldPopulatorWithArtistCoreField() );
Map<String, Object> objectTree = doTraverse( carter, ":", traverser, _simpleContext );
Assert.assertEquals( 3, objectTree.size() );
Assert.assertEquals( carter.getId(), objectTree.get( "id" ) );
Assert.assertEquals( carter.getName(), objectTree.get( "name" ) );
List<Map<String, Object>> favoriteArtists = getList( objectTree, "favoriteArtists" );
Assert.assertNotNull( favoriteArtists );
Assert.assertEquals( 2, favoriteArtists.size() );
Map<String, Object> neutralMap = findItem( favoriteArtists, "name", "Neutral Milk Hotel" );
Assert.assertEquals( DataGenerator.neutralMilkHotel().getId(), neutralMap.get( "id" ) );
Map<String, Object> arcadeMap = findItem( favoriteArtists, "name", "Arcade Fire" );
Assert.assertEquals( DataGenerator.arcadeFire().getId(), arcadeMap.get( "id" ) );
}
|
#vulnerable code
@Test
public void testComplexCoreFields()
{
User carter = DataGenerator.carter();
carter.getFavoriteArtists().add( DataGenerator.neutralMilkHotel() );
carter.getFavoriteArtists().add( DataGenerator.arcadeFire() );
ResultTraverser traverser = new ResultTraverser();
MapSelector selector = new MapSelector();
selector.register( User.class, "id", "favoriteArtists" );
Map<String, Object> objectTree = doTraverse( carter, traverser, _simpleContext, new CompositeSelector( selector, new CoreSelector() ) );
Assert.assertTrue( objectTree.size() >= 2 );
Assert.assertEquals( carter.getId(), objectTree.get( "id" ) );
Assert.assertEquals( carter.getName(), objectTree.get( "name" ) );
List<Map<String, Object>> favoriteArtists = getList( objectTree, "favoriteArtists" );
Assert.assertNotNull( favoriteArtists );
Assert.assertEquals( 2, favoriteArtists.size() );
Map<String, Object> neutralMap = findItem( favoriteArtists, "name", "Neutral Milk Hotel" );
Assert.assertEquals( DataGenerator.neutralMilkHotel().getId(), neutralMap.get( "id" ) );
Map<String, Object> arcadeMap = findItem( favoriteArtists, "name", "Arcade Fire" );
Assert.assertEquals( DataGenerator.arcadeFire().getId(), arcadeMap.get( "id" ) );
}
#location 20
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void init()
{
try
{
init( Thread.currentThread().getContextClassLoader().getResourceAsStream( "sampledb.sql" ) );
}
catch ( Exception e )
{
throw new RuntimeException( e );
}
// this doesn't seem to work yet. It reads a partial line and throws up
// try
// {
// init( new GZIPInputStream( new URL( remoteData ).openStream() ) );
// }
// catch ( Exception e )
// {
// e.printStackTrace();
// }
}
|
#vulnerable code
public void init() throws IOException
{
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( "sampledb.sql" );
BufferedReader reader = new BufferedReader( new InputStreamReader( is ) );
String line = null;
while ( ( line = reader.readLine() ) != null )
{
String type = line.replaceFirst( "INSERT INTO ([^(]+).*", "$1" );
String[] values = line.replaceFirst( ".*VALUES\\((.*)\\)", "$1" ).split( ", " );
if ("User".equalsIgnoreCase( type ))
{
newUser( toLong( values[ 0 ] ), toStr( values[ 1 ] ) );
}
else if ("Friend".equalsIgnoreCase( type ))
{
newFriend( toLong( values[ 0 ] ), toLong( values[ 1 ] ) );
}
else if ("Artist".equalsIgnoreCase( type ))
{
newArtist( toLong( values[ 0 ] ), toStr( values[ 1 ] ) );
}
else if ("Fan".equalsIgnoreCase( type ))
{
newFan( toLong( values[ 0 ] ), toLong( values[ 1 ] ) );
}
else if ("Album".equalsIgnoreCase( type ))
{
newAlbum( toLong( values[ 0 ] ), toStr( values[ 1 ] ), toLong( values[ 2 ] ), new Integer( values[ 3 ].trim() ) );
}
else if ("Song".equalsIgnoreCase( type ))
{
newSong( toLong( values[ 0 ] ), toStr( values[ 1 ] ), toLong( values[ 2 ] ), toLong( values[ 3 ] ) );
}
}
is.close();
}
#location 13
#vulnerability type RESOURCE_LEAK
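The patch funnels the work through an init(InputStream) overload whose body is not shown. A hypothetical shape for that overload, using try-with-resources so the reader (and the underlying stream) is closed even when a line fails to parse:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

class SampleDbLoader {
    // Hypothetical overload; the parsing body would mirror the original while-loop.
    void init(InputStream is) throws IOException {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(is))) {
            String line;
            while ((line = reader.readLine()) != null) {
                // dispatch on the INSERT target type, as in the original loop
            }
        }
    }
}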
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testView() throws IOException {
UserDefinedFileAttributeView view = provider.getView(attributeStoreSupplier());
assertNotNull(view);
ASSERT.that(view.name()).is("user");
ASSERT.that(view.list()).isEmpty();
byte[] b1 = {0, 1, 2};
byte[] b2 = {0, 1, 2, 3, 4};
view.write("b1", ByteBuffer.wrap(b1));
view.write("b2", ByteBuffer.wrap(b2));
ASSERT.that(view.list()).has().allOf("b1", "b2");
ASSERT.that(store.getAttributeKeys()).has().exactly("user:b1", "user:b2");
ASSERT.that(view.size("b1")).is(3);
ASSERT.that(view.size("b2")).is(5);
ByteBuffer buf1 = ByteBuffer.allocate(view.size("b1"));
ByteBuffer buf2 = ByteBuffer.allocate(view.size("b2"));
view.read("b1", buf1);
view.read("b2", buf2);
ASSERT.that(Arrays.equals(b1, buf1.array())).isTrue();
ASSERT.that(Arrays.equals(b2, buf2.array())).isTrue();
view.delete("b2");
ASSERT.that(view.list()).has().exactly("b1");
ASSERT.that(store.getAttributeKeys()).has().exactly("user:b1");
try {
view.size("b2");
fail();
} catch (IllegalArgumentException expected) {
ASSERT.that(expected.getMessage()).contains("not set");
}
try {
view.read("b2", ByteBuffer.allocate(10));
fail();
} catch (IllegalArgumentException expected) {
ASSERT.that(expected.getMessage()).contains("not set");
}
view.write("b1", ByteBuffer.wrap(b2));
ASSERT.that(view.size("b1")).is(5);
view.delete("b2"); // succeeds
}
|
#vulnerable code
@Test
public void testView() throws IOException {
UserDefinedFileAttributeView view =
service.getFileAttributeView(fileSupplier(), UserDefinedFileAttributeView.class);
assertNotNull(view);
ASSERT.that(view.name()).is("user");
ASSERT.that(view.list()).isEmpty();
byte[] b1 = {0, 1, 2};
byte[] b2 = {0, 1, 2, 3, 4};
view.write("b1", ByteBuffer.wrap(b1));
view.write("b2", ByteBuffer.wrap(b2));
ASSERT.that(view.list()).has().allOf("b1", "b2");
ASSERT.that(service.readAttributes(file, "user:*").keySet())
.has().allOf("b1", "b2");
ASSERT.that(view.size("b1")).is(3);
ASSERT.that(view.size("b2")).is(5);
ByteBuffer buf1 = ByteBuffer.allocate(view.size("b1"));
ByteBuffer buf2 = ByteBuffer.allocate(view.size("b2"));
view.read("b1", buf1);
view.read("b2", buf2);
ASSERT.that(Arrays.equals(b1, buf1.array())).isTrue();
ASSERT.that(Arrays.equals(b2, buf2.array())).isTrue();
view.delete("b2");
ASSERT.that(view.list()).has().exactly("b1");
ASSERT.that(service.readAttributes(file, "user:*").keySet())
.has().exactly("b1");
try {
view.size("b2");
fail();
} catch (IllegalArgumentException expected) {
ASSERT.that(expected.getMessage()).contains("not set");
}
try {
view.read("b2", ByteBuffer.allocate(10));
fail();
} catch (IllegalArgumentException expected) {
ASSERT.that(expected.getMessage()).contains("not set");
}
view.write("b1", ByteBuffer.wrap(b2));
ASSERT.that(view.size("b1")).is(5);
view.delete("b2"); // succeeds
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testAsynchronousClose() throws Exception {
RegularFile file = regularFile(10);
final FileChannel channel = channel(file, READ, WRITE);
file.writeLock().lock(); // ensure all operations on the channel will block
ExecutorService executor = Executors.newCachedThreadPool();
CountDownLatch latch = new CountDownLatch(BLOCKING_OP_COUNT);
List<Future<?>> futures = queueAllBlockingOperations(channel, executor, latch);
// wait for all the threads to have started running
latch.await();
// then ensure time for operations to start blocking
Uninterruptibles.sleepUninterruptibly(20, MILLISECONDS);
// close channel on this thread
channel.close();
// the blocking operations are running on different threads, so they all get
// AsynchronousCloseException
for (Future<?> future : futures) {
try {
future.get();
fail();
} catch (ExecutionException expected) {
assertThat(expected.getCause()).named("blocking thread exception")
.isA(AsynchronousCloseException.class);
}
}
}
|
#vulnerable code
@Test
public void testAsynchronousClose() throws IOException, InterruptedException {
RegularFile file = regularFile(10);
final FileChannel channel = channel(file, READ, WRITE);
file.writeLock().lock(); // ensure all operations on the channel will block
ExecutorService executor = Executors.newCachedThreadPool();
List<Future<?>> futures = queueAllBlockingOperations(channel, executor);
// ensure time for operations to start blocking
Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS);
channel.close();
for (Future<?> future : futures) {
try {
future.get();
fail();
} catch (ExecutionException expected) {
assertTrue(expected.getCause() instanceof AsynchronousCloseException);
}
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
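The patched test replaces a bare sleep with a CountDownLatch handshake: each worker counts down as soon as it is running, and the test thread awaits all of them before closing the channel. A minimal standalone sketch of that handshake (the worker count and body are illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class LatchHandshake {
    public static void main(String[] args) throws InterruptedException {
        final int workers = 4;
        final CountDownLatch started = new CountDownLatch(workers);
        ExecutorService executor = Executors.newCachedThreadPool();
        for (int i = 0; i < workers; i++) {
            executor.submit(() -> {
                started.countDown(); // signal "running" before the blocking work
                // ... blocking operation under test ...
            });
        }
        started.await(); // deterministic: all workers have started before we proceed
        executor.shutdown();
    }
}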
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public WatchService newWatchService() throws IOException {
return watchServiceConfig.newWatchService(defaultView, pathService);
}
|
#vulnerable code
@Override
public WatchService newWatchService() throws IOException {
return new PollingWatchService(defaultView, pathService, fileStore.state());
}
#location 3
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testCloseByInterrupt() throws Exception {
RegularFile file = regularFile(10);
final FileChannel channel = channel(file, READ, WRITE);
file.writeLock().lock(); // ensure all operations on the channel will block
ExecutorService executor = Executors.newCachedThreadPool();
final CountDownLatch threadStartLatch = new CountDownLatch(1);
final SettableFuture<Throwable> interruptException = SettableFuture.create();
// This thread, being the first to run, will be blocking on the interruptible lock (the byte
// file's write lock) and as such will be interrupted properly... the other threads will be
// blocked on the lock that guards the position field and the specification that only one method
// on the channel will be in progress at a time. That lock is not interruptible, so we must
// interrupt this thread.
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
threadStartLatch.countDown();
try {
channel.write(ByteBuffer.allocate(20));
interruptException.set(null);
} catch (Throwable e) {
interruptException.set(e);
}
}
});
thread.start();
// let the thread start running
threadStartLatch.await();
// then ensure time for thread to start blocking on the write lock
Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS);
CountDownLatch blockingStartLatch = new CountDownLatch(BLOCKING_OP_COUNT);
List<Future<?>> futures = queueAllBlockingOperations(channel, executor, blockingStartLatch);
// wait for all blocking threads to start
blockingStartLatch.await();
// then ensure time for the operations to start blocking
Uninterruptibles.sleepUninterruptibly(20, MILLISECONDS);
// interrupting this blocking thread closes the channel and makes all the other threads
// throw AsynchronousCloseException... the operation on this thread should throw
// ClosedByInterruptException
thread.interrupt();
// get the exception that caused the interrupted operation to terminate
assertThat(interruptException.get(200, MILLISECONDS))
.named("interrupted thread exception")
.isA(ClosedByInterruptException.class);
// check that each other thread got AsynchronousCloseException (since the interrupt, on a
// different thread, closed the channel)
for (Future<?> future : futures) {
try {
future.get();
fail();
} catch (ExecutionException expected) {
assertThat(expected.getCause()).named("blocking thread exception")
.isA(AsynchronousCloseException.class);
}
}
}
|
#vulnerable code
@Test
public void testCloseByInterrupt() throws IOException, InterruptedException {
RegularFile file = regularFile(10);
final FileChannel channel = channel(file, READ, WRITE);
file.writeLock().lock(); // ensure all operations on the channel will block
ExecutorService executor = Executors.newCachedThreadPool();
final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<Throwable> interruptException = new AtomicReference<>();
// This thread, being the first to run, will be blocking on the interruptible lock (the byte
// file's write lock) and as such will be interrupted properly... the other threads will be
// blocked on the lock that guards the position field and the specification that only one method
// on the channel will be in progress at a time. That lock is not interruptible, so we must
// interrupt this thread.
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try {
channel.write(ByteBuffer.allocate(20));
latch.countDown();
} catch (Throwable e) {
interruptException.set(e);
latch.countDown();
}
}
});
thread.start();
// ensure time for thread to start blocking on the write lock
Uninterruptibles.sleepUninterruptibly(5, MILLISECONDS);
List<Future<?>> futures = queueAllBlockingOperations(channel, executor);
// ensure time for operations to start blocking
Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS);
// interrupting this blocking thread closes the channel and makes all the other threads
// throw AsynchronousCloseException... the operation on this thread should throw
// ClosedByInterruptException
thread.interrupt();
latch.await();
assertTrue(interruptException.get() instanceof ClosedByInterruptException);
for (Future<?> future : futures) {
try {
future.get();
fail();
} catch (ExecutionException expected) {
assertTrue(expected.getCause() instanceof AsynchronousCloseException);
}
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) {
loadProperties();
int concurrents = Integer.parseInt(properties.getProperty("concurrents"));
int runtime = Integer.parseInt(properties.getProperty("runtime"));
String classname = properties.getProperty("classname");
String params = properties.getProperty("params");
isMultiClient = Boolean.parseBoolean(properties.getProperty("isMultiClient"));
if (args.length == 5) {
concurrents = Integer.parseInt(args[0]);
runtime = Integer.parseInt(args[1]);
classname = args[2];
params = args[3];
isMultiClient = Boolean.parseBoolean(args[4]);
}
ApplicationContext applicationContext = new ClassPathXmlApplicationContext(
new String[]{"classpath*:motan-benchmark-client.xml"});
benchmarkService = (BenchmarkService) applicationContext.getBean("motanBenchmarkReferer");
new MotanBenchmarkClient().start(concurrents, runtime, classname, params);
}
|
#vulnerable code
public static void main(String[] args) {
int concurrents = Integer.parseInt(properties.getProperty("concurrents"));
int runtime = Integer.parseInt(properties.getProperty("runtime"));
String classname = properties.getProperty("classname");
String params = properties.getProperty("params");
isMultiClient = Boolean.parseBoolean(properties.getProperty("isMultiClient"));
if (args.length == 5) {
concurrents = Integer.parseInt(args[0]);
runtime = Integer.parseInt(args[1]);
classname = args[2];
params = args[3];
isMultiClient = Boolean.parseBoolean(args[4]);
}
ApplicationContext applicationContext = new ClassPathXmlApplicationContext(new String[]{"classpath*:motan-benchmark-client.xml"});
benchmarkService = (BenchmarkService) applicationContext.getBean("motanBenchmarkReferer");
new MotanBenchmarkClient().start(concurrents, runtime, classname, params);
}
#location 16
#vulnerability type RESOURCE_LEAK
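The patch adds a loadProperties() call before the first properties.getProperty(...), so the static properties field is populated before use. The method body is not shown; a hypothetical implementation (the resource name is an assumption) that also avoids leaking the stream:

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

// Hypothetical loadProperties(); only the call site appears in the patch.
final class BenchmarkProps {
    static Properties properties;

    static void loadProperties() {
        try (InputStream in = BenchmarkProps.class.getClassLoader()
                .getResourceAsStream("benchmark.properties")) { // name is an assumption
            Properties p = new Properties();
            if (in != null) {
                p.load(in);
            }
            properties = p;
        } catch (IOException e) {
            throw new RuntimeException("Failed to load benchmark properties", e);
        }
    }
}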
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Before
public void setup() throws DBException {
orientDBClient = new OrientDBClient();
Properties p = new Properties();
// TODO: Extract the property names into final variables in OrientDBClient
p.setProperty("orientdb.url", TEST_DB_URL);
orientDBClient.setProperties(p);
orientDBClient.init();
}
|
#vulnerable code
@Before
public void setup() throws DBException {
orientDBClient = new OrientDBClient();
Properties p = new Properties();
// TODO: Extract the property names into final variables in OrientDBClient
p.setProperty("orientdb.url", TEST_DB_URL);
orientDBClient.setProperties(p);
orientDBClient.init();
orientDBDictionary = orientDBClient.getDB().getDictionary();
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void cleanup() throws DBException
{
// Get the measurements instance as this is the only client that should
// count clean up time like an update since autoflush is off.
Measurements _measurements = Measurements.getMeasurements();
try {
long st=System.nanoTime();
if (_hTable != null) {
_hTable.flushCommits();
}
synchronized(THREAD_COUNT) {
int threadCount = THREAD_COUNT.decrementAndGet();
if (threadCount <= 0 && _hConn != null) {
_hConn.close();
}
}
long en=System.nanoTime();
_measurements.measure("UPDATE", (int)((en-st)/1000));
} catch (IOException e) {
throw new DBException(e);
}
}
|
#vulnerable code
public void cleanup() throws DBException
{
// Get the measurements instance as this is the only client that should
// count clean up time like an update since autoflush is off.
Measurements _measurements = Measurements.getMeasurements();
try {
long st=System.nanoTime();
if (_hTable != null) {
_hTable.flushCommits();
}
if (_hConn != null) {
_hConn.close();
}
long en=System.nanoTime();
_measurements.measure("UPDATE", (int)((en-st)/1000));
} catch (IOException e) {
throw new DBException(e);
}
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION
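The patched cleanup only closes the shared connection once an AtomicInteger of live client threads reaches zero, with a synchronized block around the check-and-close so two threads cannot both act on the count. A standalone sketch of that last-one-out pattern (names are illustrative, not from the original class):

import java.util.concurrent.atomic.AtomicInteger;

public class LastOneOutCloses {
    private static final AtomicInteger THREAD_COUNT = new AtomicInteger();
    private static AutoCloseable shared; // e.g. the shared HBase connection

    static void acquire(AutoCloseable resource) {
        synchronized (THREAD_COUNT) {
            shared = resource;
            THREAD_COUNT.incrementAndGet();
        }
    }

    static void release() throws Exception {
        synchronized (THREAD_COUNT) {
            // decrement and close must be one atomic step, or two threads can
            // both observe zero or both skip the close
            if (THREAD_COUNT.decrementAndGet() <= 0 && shared != null) {
                shared.close();
                shared = null;
            }
        }
    }
}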
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int read(String table, String key, Set<String> fields,
HashMap<String, ByteIterator> result) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document query = new Document("_id", key);
FindIterable<Document> findIterable = collection.find(query);
Document queryResult = null;
if (fields != null) {
Document projection = new Document();
for (String field : fields) {
projection.put(field, INCLUDE);
}
findIterable.projection(projection);
}
queryResult = findIterable.first();
if (queryResult != null) {
fillMap(result, queryResult);
}
return queryResult != null ? 0 : 1;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
}
|
#vulnerable code
@Override
public int read(String table, String key, Set<String> fields,
HashMap<String, ByteIterator> result) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document query = new Document("_id", key);
FindIterable<Document> findIterable = collection
.withReadPreference(readPreference)
.find(query);
Document queryResult = null;
if (fields != null) {
Document projection = new Document();
for (String field : fields) {
projection.put(field, INCLUDE);
}
findIterable.projection(projection);
}
queryResult = findIterable.first();
if (queryResult != null) {
fillMap(result, queryResult);
}
return queryResult != null ? 0 : 1;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
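The fix drops the per-operation withReadPreference(...) call; presumably the preference is instead applied once when the database handle is created. A sketch of configuring it at client construction, assuming the MongoDB Java driver 3.x API:

import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoDatabase;

public class MongoInitSketch {
    static MongoDatabase open(String host, String dbName) {
        MongoClientOptions options = MongoClientOptions.builder()
                .readPreference(ReadPreference.primaryPreferred()) // set once, not per call
                .build();
        MongoClient client = new MongoClient(host, options); // caller owns and closes it
        return client.getDatabase(dbName);
    }
}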
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int update(String table, String key,
HashMap<String, ByteIterator> values) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document query = new Document("_id", key);
Document fieldsToSet = new Document();
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
fieldsToSet.put(entry.getKey(), entry.getValue().toArray());
}
Document update = new Document("$set", fieldsToSet);
UpdateResult result = collection.updateOne(query, update);
if (result.wasAcknowledged() && result.getMatchedCount() == 0) {
System.err.println("Nothing updated for key " + key);
return 1;
}
return 0;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
}
|
#vulnerable code
@Override
public int update(String table, String key,
HashMap<String, ByteIterator> values) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document query = new Document("_id", key);
Document fieldsToSet = new Document();
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
fieldsToSet.put(entry.getKey(), entry.getValue().toArray());
}
Document update = new Document("$set", fieldsToSet);
UpdateResult result = collection.withWriteConcern(writeConcern)
.updateOne(query, update);
if (result.wasAcknowledged() && result.getMatchedCount() == 0) {
System.err.println("Nothing updated for key " + key);
return 1;
}
return 0;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
}
#location 15
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Status scan(String table, String startkey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
try {
PreparedStatement stmt = (fields == null) ? scanAllStmt.get() : scanStmts.get(fields);
// Prepare statement on demand
if (stmt == null) {
Select.Builder selectBuilder;
if (fields == null) {
selectBuilder = QueryBuilder.select().all();
} else {
selectBuilder = QueryBuilder.select();
for (String col : fields) {
((Select.Selection) selectBuilder).column(col);
}
}
Select selectStmt = selectBuilder.from(table);
// The statement builder is not setup right for tokens.
// So, we need to build it manually.
String initialStmt = selectStmt.toString();
StringBuilder scanStmt = new StringBuilder();
scanStmt.append(initialStmt.substring(0, initialStmt.length() - 1));
scanStmt.append(" WHERE ");
scanStmt.append(QueryBuilder.token(YCSB_KEY));
scanStmt.append(" >= ");
scanStmt.append("token(");
scanStmt.append(QueryBuilder.bindMarker());
scanStmt.append(")");
scanStmt.append(" LIMIT ");
scanStmt.append(QueryBuilder.bindMarker());
stmt = session.prepare(scanStmt.toString());
stmt.setConsistencyLevel(readConsistencyLevel);
if (trace) {
stmt.enableTracing();
}
PreparedStatement prevStmt = (fields == null) ?
scanAllStmt.getAndSet(stmt) :
scanStmts.putIfAbsent(new HashSet(fields), stmt);
if (prevStmt != null) {
stmt = prevStmt;
}
}
logger.debug(stmt.getQueryString());
logger.debug("startKey = {}, recordcount = {}", startkey, recordcount);
ResultSet rs = session.execute(stmt.bind(startkey, Integer.valueOf(recordcount)));
HashMap<String, ByteIterator> tuple;
while (!rs.isExhausted()) {
Row row = rs.one();
tuple = new HashMap<String, ByteIterator>();
ColumnDefinitions cd = row.getColumnDefinitions();
for (ColumnDefinitions.Definition def : cd) {
ByteBuffer val = row.getBytesUnsafe(def.getName());
if (val != null) {
tuple.put(def.getName(), new ByteArrayByteIterator(val.array()));
} else {
tuple.put(def.getName(), null);
}
}
result.add(tuple);
}
return Status.OK;
} catch (Exception e) {
logger.error(
MessageFormatter.format("Error scanning with startkey: {}", startkey).getMessage(), e);
return Status.ERROR;
}
}
|
#vulnerable code
@Override
public Status scan(String table, String startkey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
try {
Statement stmt;
Select.Builder selectBuilder;
if (fields == null) {
selectBuilder = QueryBuilder.select().all();
} else {
selectBuilder = QueryBuilder.select();
for (String col : fields) {
((Select.Selection) selectBuilder).column(col);
}
}
stmt = selectBuilder.from(table);
// The statement builder is not setup right for tokens.
// So, we need to build it manually.
String initialStmt = stmt.toString();
StringBuilder scanStmt = new StringBuilder();
scanStmt.append(initialStmt.substring(0, initialStmt.length() - 1));
scanStmt.append(" WHERE ");
scanStmt.append(QueryBuilder.token(YCSB_KEY));
scanStmt.append(" >= ");
scanStmt.append("token('");
scanStmt.append(startkey);
scanStmt.append("')");
scanStmt.append(" LIMIT ");
scanStmt.append(recordcount);
stmt = new SimpleStatement(scanStmt.toString());
stmt.setConsistencyLevel(readConsistencyLevel);
if (debug) {
System.out.println(stmt.toString());
}
if (trace) {
stmt.enableTracing();
}
ResultSet rs = session.execute(stmt);
HashMap<String, ByteIterator> tuple;
while (!rs.isExhausted()) {
Row row = rs.one();
tuple = new HashMap<String, ByteIterator>();
ColumnDefinitions cd = row.getColumnDefinitions();
for (ColumnDefinitions.Definition def : cd) {
ByteBuffer val = row.getBytesUnsafe(def.getName());
if (val != null) {
tuple.put(def.getName(), new ByteArrayByteIterator(val.array()));
} else {
tuple.put(def.getName(), null);
}
}
result.add(tuple);
}
return Status.OK;
} catch (Exception e) {
e.printStackTrace();
System.out.println("Error scanning with startkey: " + startkey);
return Status.ERROR;
}
}
#location 37
#vulnerability type THREAD_SAFETY_VIOLATION
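Both the patched scan and read cache their prepared statements: an AtomicReference for the all-fields case and a concurrent map keyed by field set otherwise, with losers of the prepare race adopting the statement published first. The same idiom in a minimal generic sketch (types are illustrative):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

public class StatementCache<S> {
    private final ConcurrentMap<Set<String>, S> cache = new ConcurrentHashMap<>();

    S getOrPrepare(Set<String> fields, Function<Set<String>, S> prepare) {
        S stmt = cache.get(fields);
        if (stmt == null) {
            S fresh = prepare.apply(fields);
            S prev = cache.putIfAbsent(fields, fresh); // first writer wins
            stmt = (prev != null) ? prev : fresh;      // losers adopt the winner's statement
        }
        return stmt;
    }
}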
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Status read(String table, String key, Set<String> fields,
Map<String, ByteIterator> result) {
try {
PreparedStatement stmt = (fields == null) ? readAllStmt.get() : readStmts.get(fields);
// Prepare statement on demand
if (stmt == null) {
Select.Builder selectBuilder;
if (fields == null) {
selectBuilder = QueryBuilder.select().all();
} else {
selectBuilder = QueryBuilder.select();
for (String col : fields) {
((Select.Selection) selectBuilder).column(col);
}
}
stmt = session.prepare(selectBuilder.from(table)
.where(QueryBuilder.eq(YCSB_KEY, QueryBuilder.bindMarker()))
.limit(1));
stmt.setConsistencyLevel(readConsistencyLevel);
if (trace) {
stmt.enableTracing();
}
PreparedStatement prevStmt = (fields == null) ?
readAllStmt.getAndSet(stmt) :
readStmts.putIfAbsent(new HashSet(fields), stmt);
if (prevStmt != null) {
stmt = prevStmt;
}
}
logger.debug(stmt.getQueryString());
logger.debug("key = {}", key);
ResultSet rs = session.execute(stmt.bind(key));
if (rs.isExhausted()) {
return Status.NOT_FOUND;
}
// Should be only 1 row
Row row = rs.one();
ColumnDefinitions cd = row.getColumnDefinitions();
for (ColumnDefinitions.Definition def : cd) {
ByteBuffer val = row.getBytesUnsafe(def.getName());
if (val != null) {
result.put(def.getName(), new ByteArrayByteIterator(val.array()));
} else {
result.put(def.getName(), null);
}
}
return Status.OK;
} catch (Exception e) {
logger.error(MessageFormatter.format("Error reading key: {}", key).getMessage(), e);
return Status.ERROR;
}
}
|
#vulnerable code
@Override
public Status read(String table, String key, Set<String> fields,
Map<String, ByteIterator> result) {
try {
Statement stmt;
Select.Builder selectBuilder;
if (fields == null) {
selectBuilder = QueryBuilder.select().all();
} else {
selectBuilder = QueryBuilder.select();
for (String col : fields) {
((Select.Selection) selectBuilder).column(col);
}
}
stmt = selectBuilder.from(table).where(QueryBuilder.eq(YCSB_KEY, key))
.limit(1);
stmt.setConsistencyLevel(readConsistencyLevel);
if (debug) {
System.out.println(stmt.toString());
}
if (trace) {
stmt.enableTracing();
}
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
return Status.NOT_FOUND;
}
// Should be only 1 row
Row row = rs.one();
ColumnDefinitions cd = row.getColumnDefinitions();
for (ColumnDefinitions.Definition def : cd) {
ByteBuffer val = row.getBytesUnsafe(def.getName());
if (val != null) {
result.put(def.getName(), new ByteArrayByteIterator(val.array()));
} else {
result.put(def.getName(), null);
}
}
return Status.OK;
} catch (Exception e) {
e.printStackTrace();
System.out.println("Error reading key: " + key);
return Status.ERROR;
}
}
#location 21
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int scan(String table, String startkey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
MongoCursor<Document> cursor = null;
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document scanRange = new Document("$gte", startkey);
Document query = new Document("_id", scanRange);
Document sort = new Document("_id", INCLUDE);
Document projection = null;
if (fields != null) {
projection = new Document();
for (String fieldName : fields) {
projection.put(fieldName, INCLUDE);
}
}
cursor = collection.find(query)
.projection(projection).sort(sort).limit(recordcount).iterator();
if (!cursor.hasNext()) {
System.err.println("Nothing found in scan for key " + startkey);
return 1;
}
while (cursor.hasNext()) {
HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>();
Document obj = cursor.next();
fillMap(resultMap, obj);
result.add(resultMap);
}
return 0;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
finally {
if (cursor != null) {
cursor.close();
}
}
}
|
#vulnerable code
@Override
public int scan(String table, String startkey, int recordcount,
Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
MongoCursor<Document> cursor = null;
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document scanRange = new Document("$gte", startkey);
Document query = new Document("_id", scanRange);
Document sort = new Document("_id", INCLUDE);
Document projection = null;
if (fields != null) {
projection = new Document();
for (String fieldName : fields) {
projection.put(fieldName, INCLUDE);
}
}
cursor = collection.withReadPreference(readPreference).find(query)
.projection(projection).sort(sort).limit(recordcount).iterator();
if (!cursor.hasNext()) {
System.err.println("Nothing found in scan for key " + startkey);
return 1;
}
while (cursor.hasNext()) {
HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>();
Document obj = cursor.next();
fillMap(resultMap, obj);
result.add(resultMap);
}
return 0;
}
catch (Exception e) {
System.err.println(e.toString());
return 1;
}
finally {
if (cursor != null) {
cursor.close();
}
}
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void init() throws DBException {
Properties props = getProperties();
String url = props.getProperty(URL_PROPERTY);
String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT);
String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT);
Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT));
String remoteStorageType = props.getProperty(STORAGE_TYPE_PROPERTY);
String intent = props.getProperty(INTENT_PROPERTY, INTENT_PROPERTY_DEFAULT);
Boolean dotransactions = Boolean.parseBoolean(
props.getProperty(DO_TRANSACTIONS_PROPERTY, DO_TRANSACTIONS_PROPERTY_DEFAULT));
if (url == null) {
throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY));
}
log.info("OrientDB loading database url = " + url);
// If using a remote database, use the OServerAdmin interface to connect
if (url.startsWith(OEngineRemote.NAME)) {
isRemote = true;
if (remoteStorageType == null) {
throw new DBException("When connecting to a remote OrientDB instance, " +
"specify a database storage type (plocal or memory) with " + STORAGE_TYPE_PROPERTY);
}
try {
OServerAdmin server = new OServerAdmin(url).connect(user, password);
if (server.existsDatabase()) {
if (newdb && !dotransactions) {
log.info("OrientDB dropping and recreating fresh db on remote server.");
server.dropDatabase(remoteStorageType);
server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType);
}
} else {
log.info("OrientDB database not found, creating fresh db");
server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType);
}
server.close();
db = new ODatabaseDocumentTx(url).open(user, password);
} catch (IOException | OException e) {
throw new DBException(String.format("Error interfacing with %s", url), e);
}
} else {
try {
db = new ODatabaseDocumentTx(url);
if (db.exists()) {
db.open(user, password);
if (newdb && !dotransactions) {
log.info("OrientDB dropping and recreating fresh db.");
db.drop();
db.create();
}
} else {
log.info("OrientDB database not found, creating fresh db");
db.create();
}
} catch (ODatabaseException e) {
throw new DBException(String.format("Error interfacing with %s", url), e);
}
}
log.info("OrientDB connection created with " + url);
dictionary = db.getMetadata().getIndexManager().getDictionary();
if (!db.getMetadata().getSchema().existsClass(CLASS)) {
db.getMetadata().getSchema().createClass(CLASS);
}
if (intent.equals(ORIENTDB_MASSIVEINSERT)) {
log.info("Declaring intent of MassiveInsert.");
db.declareIntent(new OIntentMassiveInsert());
} else if (intent.equals(ORIENTDB_MASSIVEREAD)) {
log.info("Declaring intent of MassiveRead.");
db.declareIntent(new OIntentMassiveRead());
} else if (intent.equals(ORIENTDB_NOCACHE)) {
log.info("Declaring intent of NoCache.");
db.declareIntent(new OIntentNoCache());
}
}
|
#vulnerable code
@Override
public void init() throws DBException {
Properties props = getProperties();
String url = props.getProperty(URL_PROPERTY);
String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT);
String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT);
Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT));
String remoteStorageType = props.getProperty(STORAGE_TYPE_PROPERTY);
Boolean dotransactions = Boolean.parseBoolean(
props.getProperty(DO_TRANSACTIONS_PROPERTY, DO_TRANSACTIONS_PROPERTY_DEFAULT));
if (url == null) {
throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY));
}
log.info("OrientDB loading database url = " + url);
// If using a remote database, use the OServerAdmin interface to connect
if (url.startsWith(OEngineRemote.NAME)) {
isRemote = true;
if (remoteStorageType == null) {
throw new DBException("When connecting to a remote OrientDB instance, " +
"specify a database storage type (plocal or memory) with " + STORAGE_TYPE_PROPERTY);
}
try {
OServerAdmin server = new OServerAdmin(url).connect(user, password);
if (server.existsDatabase()) {
if (newdb && !dotransactions) {
log.info("OrientDB dropping and recreating fresh db on remote server.");
server.dropDatabase(remoteStorageType);
server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType);
}
} else {
log.info("OrientDB database not found, creating fresh db");
server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType);
}
server.close();
db = new ODatabaseDocumentTx(url).open(user, password);
} catch (IOException | OException e) {
throw new DBException(String.format("Error interfacing with %s", url), e);
}
} else {
try {
db = new ODatabaseDocumentTx(url);
if (db.exists()) {
db.open(user, password);
if (newdb && !dotransactions) {
log.info("OrientDB dropping and recreating fresh db.");
db.drop();
db.create();
}
} else {
log.info("OrientDB database not found, creating fresh db");
db.create();
}
} catch (ODatabaseException e) {
throw new DBException(String.format("Error interfacing with %s", url), e);
}
}
log.info("OrientDB connection created with " + url);
dictionary = db.getMetadata().getIndexManager().getDictionary();
if (!db.getMetadata().getSchema().existsClass(CLASS)) {
db.getMetadata().getSchema().createClass(CLASS);
}
db.declareIntent(new OIntentMassiveInsert());
}
#location 42
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Status delete(String table, String key) {
try {
PreparedStatement stmt = deleteStmt.get();
// Prepare statement on demand
if (stmt == null) {
stmt = session.prepare(QueryBuilder.delete().from(table)
.where(QueryBuilder.eq(YCSB_KEY, QueryBuilder.bindMarker())));
stmt.setConsistencyLevel(writeConsistencyLevel);
if (trace) {
stmt.enableTracing();
}
PreparedStatement prevStmt = deleteStmt.getAndSet(stmt);
if (prevStmt != null) {
stmt = prevStmt;
}
}
logger.debug(stmt.getQueryString());
logger.debug("key = {}", key);
session.execute(stmt.bind(key));
return Status.OK;
} catch (Exception e) {
logger.error(MessageFormatter.format("Error deleting key: {}", key).getMessage(), e);
}
return Status.ERROR;
}
|
#vulnerable code
@Override
public Status delete(String table, String key) {
try {
Statement stmt;
stmt = QueryBuilder.delete().from(table)
.where(QueryBuilder.eq(YCSB_KEY, key));
stmt.setConsistencyLevel(writeConsistencyLevel);
if (debug) {
System.out.println(stmt.toString());
}
if (trace) {
stmt.enableTracing();
}
session.execute(stmt);
return Status.OK;
} catch (Exception e) {
e.printStackTrace();
System.out.println("Error deleting key: " + key);
}
return Status.ERROR;
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Before
public void setUp() throws Exception {
session = cassandraUnit.getSession();
Properties p = new Properties();
p.setProperty("hosts", HOST);
p.setProperty("port", Integer.toString(PORT));
p.setProperty("table", TABLE);
Measurements.setProperties(p);
final CoreWorkload workload = new CoreWorkload();
workload.init(p);
client = new CassandraCQLClient();
client.setProperties(p);
client.init();
}
|
#vulnerable code
@Before
public void setUp() throws Exception {
// check that this is Java 8+
int javaVersion = Integer.parseInt(System.getProperty("java.version").split("\\.")[1]);
Assume.assumeTrue(javaVersion >= 8);
session = cassandraUnit.getSession();
Properties p = new Properties();
p.setProperty("hosts", HOST);
p.setProperty("port", Integer.toString(PORT));
p.setProperty("table", TABLE);
Measurements.setProperties(p);
final CoreWorkload workload = new CoreWorkload();
workload.init(p);
client = new CassandraCQLClient();
client.setProperties(p);
client.init();
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void cleanup() throws DBException {
// Get the measurements instance as this is the only client that should
// count clean up time like an update if client-side buffering is
// enabled.
Measurements measurements = Measurements.getMeasurements();
try {
long st = System.nanoTime();
if (bufferedMutator != null) {
bufferedMutator.close();
}
if (currentTable != null) {
currentTable.close();
}
long en = System.nanoTime();
final String type = clientSideBuffering ? "UPDATE" : "CLEANUP";
measurements.measure(type, (int) ((en - st) / 1000));
threadCount.decrementAndGet();
if (threadCount.get() <= 0) {
// Means we are done so ok to shut down the Connection.
synchronized (CONNECTION_LOCK) {
if (connection != null) {
connection.close();
connection = null;
}
}
}
} catch (IOException e) {
throw new DBException(e);
}
}
|
#vulnerable code
@Override
public void cleanup() throws DBException {
// Get the measurements instance as this is the only client that should
// count clean up time like an update if client-side buffering is
// enabled.
Measurements measurements = Measurements.getMeasurements();
try {
long st = System.nanoTime();
if (bufferedMutator != null) {
bufferedMutator.close();
}
if (currentTable != null) {
currentTable.close();
}
long en = System.nanoTime();
final String type = clientSideBuffering ? "UPDATE" : "CLEANUP";
measurements.measure(type, (int) ((en - st) / 1000));
synchronized(threadCount) {
--threadCount;
if (threadCount <= 0 && connection != null) {
connection.close();
connection = null;
}
}
} catch (IOException e) {
throw new DBException(e);
}
}
#location 18
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private Map<String, byte[]> convertToBytearrayMap(Map<String,ByteIterator> values) {
Map<String, byte[]> retVal = new HashMap<String, byte[]>();
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
retVal.put(entry.getKey(), entry.getValue().toArray());
}
return retVal;
}
|
#vulnerable code
private Map<String, byte[]> convertToBytearrayMap(Map<String,ByteIterator> values) {
Map<String, byte[]> retVal = new HashMap<String, byte[]>();
for (String key : values.keySet()) {
retVal.put(key, values.get(key).toArray());
}
return retVal;
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void cleanup() throws DBException {
if (asyncExecutor != null) {
try {
asyncExecutor.flush();
} catch (IOException e) {
throw new DBException(e);
}
}
synchronized (CONFIG) {
--threadCount;
if (threadCount <= 0) {
try {
session.close();
} catch (IOException e) {
throw new DBException(e);
}
}
}
}
|
#vulnerable code
@Override
public void cleanup() throws DBException {
if (asyncExecutor != null) {
try {
asyncExecutor.flush();
} catch (IOException e) {
throw new DBException(e);
}
}
synchronized (threadCount) {
--threadCount;
if (threadCount <= 0) {
try {
session.close();
} catch (IOException e) {
throw new DBException(e);
}
}
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
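The vulnerable cleanup synchronizes on threadCount itself; if threadCount is a boxed Integer (as synchronizing on it implies), each --threadCount autoboxes a new Integer and rebinds the field, so concurrent threads can end up holding different monitors. The patch locks a stable object (CONFIG) instead. A minimal illustration (a hypothetical class, not the original):

public class BoxedLockHazard {
    private static Integer count = 2;                // reassigned, so unusable as a lock
    private static final Object LOCK = new Object(); // stable monitor, as in the patch

    static void releaseBroken() {
        synchronized (count) { // the monitor is whatever `count` points at right now
            --count;           // autoboxes a new Integer and rebinds the field
        }
    }

    static void releaseFixed() {
        synchronized (LOCK) {  // every thread contends on the same object
            --count;
        }
    }
}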
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void init() throws DBException {
if ("true"
.equals(getProperties().getProperty("clientbuffering", "false"))) {
this.clientSideBuffering = true;
}
if (getProperties().containsKey("writebuffersize")) {
writeBufferSize =
Long.parseLong(getProperties().getProperty("writebuffersize"));
}
if (getProperties().getProperty("durability") != null) {
this.durability =
Durability.valueOf(getProperties().getProperty("durability"));
}
if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) {
config.set("hadoop.security.authentication", "Kerberos");
UserGroupInformation.setConfiguration(config);
}
if ((getProperties().getProperty("principal")!=null)
&& (getProperties().getProperty("keytab")!=null)) {
try {
UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"),
getProperties().getProperty("keytab"));
} catch (IOException e) {
System.err.println("Keytab file is not readable or not found");
throw new DBException(e);
}
}
try {
threadCount.getAndIncrement();
synchronized (CONNECTION_LOCK) {
if (connection == null) {
// Initialize if not set up already.
connection = ConnectionFactory.createConnection(config);
}
}
} catch (java.io.IOException e) {
throw new DBException(e);
}
if ((getProperties().getProperty("debug") != null)
&& (getProperties().getProperty("debug").compareTo("true") == 0)) {
debug = true;
}
if ("false"
.equals(getProperties().getProperty("hbase.usepagefilter", "true"))) {
usePageFilter = false;
}
columnFamily = getProperties().getProperty("columnfamily");
if (columnFamily == null) {
System.err.println("Error, must specify a columnfamily for HBase table");
throw new DBException("No columnfamily specified");
}
columnFamilyBytes = Bytes.toBytes(columnFamily);
// Terminate right now if table does not exist, since the client
// will not propagate this error upstream once the workload
// starts.
String table = com.yahoo.ycsb.workloads.CoreWorkload.table;
try {
final TableName tName = TableName.valueOf(table);
synchronized (CONNECTION_LOCK) {
connection.getTable(tName).getTableDescriptor();
}
} catch (IOException e) {
throw new DBException(e);
}
}
|
#vulnerable code
@Override
public void init() throws DBException {
if ("true"
.equals(getProperties().getProperty("clientbuffering", "false"))) {
this.clientSideBuffering = true;
}
if (getProperties().containsKey("writebuffersize")) {
writeBufferSize =
Long.parseLong(getProperties().getProperty("writebuffersize"));
}
if (getProperties().getProperty("durability") != null) {
this.durability =
Durability.valueOf(getProperties().getProperty("durability"));
}
if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) {
config.set("hadoop.security.authentication", "Kerberos");
UserGroupInformation.setConfiguration(config);
}
if ((getProperties().getProperty("principal")!=null)
&& (getProperties().getProperty("keytab")!=null)) {
try {
UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"),
getProperties().getProperty("keytab"));
} catch (IOException e) {
System.err.println("Keytab file is not readable or not found");
throw new DBException(e);
}
}
try {
synchronized(threadCount) {
++threadCount;
if (connection == null) {
connection = ConnectionFactory.createConnection(config);
}
}
} catch (java.io.IOException e) {
throw new DBException(e);
}
if ((getProperties().getProperty("debug") != null)
&& (getProperties().getProperty("debug").compareTo("true") == 0)) {
debug = true;
}
if ("false"
.equals(getProperties().getProperty("hbase.usepagefilter", "true"))) {
usePageFilter = false;
}
columnFamily = getProperties().getProperty("columnfamily");
if (columnFamily == null) {
System.err.println("Error, must specify a columnfamily for HBase table");
throw new DBException("No columnfamily specified");
}
columnFamilyBytes = Bytes.toBytes(columnFamily);
// Terminate right now if table does not exist, since the client
// will not propagate this error upstream once the workload
// starts.
String table = com.yahoo.ycsb.workloads.CoreWorkload.table;
try {
final TableName tName = TableName.valueOf(table);
connection.getTable(tName).getTableDescriptor();
} catch (IOException e) {
throw new DBException(e);
}
}
#location 34
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
this.currentTable = connection.getTable(tName);
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = connection.getBufferedMutator(p);
}
}
|
#vulnerable code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
synchronized (CONNECTION_LOCK) {
this.currentTable = connection.getTable(tName);
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = connection.getBufferedMutator(p);
}
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void init() throws DBException {
Properties props = getProperties();
String url = props.getProperty(URL_PROPERTY);
String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT);
String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT);
Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT));
if (url == null) {
throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY));
}
try {
System.out.println("OrientDB loading database url = " + url);
OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false);
db = new ODatabaseDocumentTx(url);
if (db.exists()) {
db.open(user, password);
if (newdb) {
System.out.println("OrientDB drop and recreate fresh db");
db.drop();
db.create();
}
} else {
System.out.println("OrientDB database not found, create fresh db");
db.create();
}
System.out.println("OrientDB connection created with " + url);
dictionary = db.getMetadata().getIndexManager().getDictionary();
if (!db.getMetadata().getSchema().existsClass(CLASS))
db.getMetadata().getSchema().createClass(CLASS);
db.declareIntent(new OIntentMassiveInsert());
} catch (Exception e1) {
System.err.println("Could not initialize OrientDB connection pool for Loader: " + e1.toString());
e1.printStackTrace();
return;
}
}
|
#vulnerable code
public void init() throws DBException {
// initialize OrientDB driver
Properties props = getProperties();
String url;
if (System.getProperty("os.name").toLowerCase().contains("win"))
url = props.getProperty("orientdb.url", "plocal:C:/temp/databases/ycsb");
else
url = props.getProperty("orientdb.url", "plocal:/temp/databases/ycsb");
String user = props.getProperty("orientdb.user", "admin");
String password = props.getProperty("orientdb.password", "admin");
Boolean newdb = Boolean.parseBoolean(props.getProperty("orientdb.newdb", "false"));
try {
System.out.println("OrientDB loading database url = " + url);
OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false);
db = new ODatabaseDocumentTx(url);
if (db.exists()) {
db.open(user, password);
if (newdb) {
System.out.println("OrientDB drop and recreate fresh db");
db.drop();
db.create();
}
} else {
System.out.println("OrientDB database not found, create fresh db");
db.create();
}
System.out.println("OrientDB connection created with " + url);
dictionary = db.getMetadata().getIndexManager().getDictionary();
if (!db.getMetadata().getSchema().existsClass(CLASS))
db.getMetadata().getSchema().createClass(CLASS);
db.declareIntent(new OIntentMassiveInsert());
} catch (Exception e1) {
System.err.println("Could not initialize OrientDB connection pool for Loader: " + e1.toString());
e1.printStackTrace();
return;
}
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Status update(String table, String key,
HashMap<String, ByteIterator> values) {
if (debug) {
System.out.println("Setting up put for key: " + key);
}
setTable(table);
final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder();
rowMutation.setRowKey(ByteString.copyFromUtf8(key));
rowMutation.setTableNameBytes(ByteStringer.wrap(lastTableBytes));
for (final Entry<String, ByteIterator> entry : values.entrySet()) {
final Mutation.Builder mutationBuilder = rowMutation.addMutationsBuilder();
final SetCell.Builder setCellBuilder = mutationBuilder.getSetCellBuilder();
setCellBuilder.setFamilyNameBytes(ByteStringer.wrap(columnFamilyBytes));
setCellBuilder.setColumnQualifier(ByteStringer.wrap(entry.getKey().getBytes()));
setCellBuilder.setValue(ByteStringer.wrap(entry.getValue().toArray()));
// Bigtable uses a 1ms granularity
setCellBuilder.setTimestampMicros(System.currentTimeMillis() * 1000);
}
try {
if (clientSideBuffering) {
bulkMutation.add(rowMutation.build());
} else {
client.mutateRow(rowMutation.build());
}
return Status.OK;
} catch (RuntimeException e) {
System.err.println("Failed to insert key: " + key + " " + e.getMessage());
return Status.ERROR;
}
}
|
#vulnerable code
@Override
public Status update(String table, String key,
HashMap<String, ByteIterator> values) {
if (debug) {
System.out.println("Setting up put for key: " + key);
}
setTable(table);
final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder();
rowMutation.setRowKey(ByteString.copyFromUtf8(key));
rowMutation.setTableNameBytes(ByteStringer.wrap(lastTableBytes));
for (final Entry<String, ByteIterator> entry : values.entrySet()) {
final Mutation.Builder mutationBuilder = rowMutation.addMutationsBuilder();
final SetCell.Builder setCellBuilder = mutationBuilder.getSetCellBuilder();
setCellBuilder.setFamilyNameBytes(ByteStringer.wrap(columnFamilyBytes));
setCellBuilder.setColumnQualifier(ByteStringer.wrap(entry.getKey().getBytes()));
setCellBuilder.setValue(ByteStringer.wrap(entry.getValue().toArray()));
// Bigtable uses a 1ms granularity
setCellBuilder.setTimestampMicros(System.currentTimeMillis() * 1000);
}
try {
if (clientSideBuffering) {
asyncExecutor.mutateRowAsync(rowMutation.build());
} else {
client.mutateRow(rowMutation.build());
}
return Status.OK;
} catch (ServiceException e) {
System.err.println("Failed to insert key: " + key + " " + e.getMessage());
return Status.ERROR;
} catch (InterruptedException e) {
System.err.println("Interrupted while inserting key: " + key + " "
+ e.getMessage());
Thread.currentThread().interrupt();
return Status.ERROR; // never get here, but lets make the compiler happy
}
}
#location 28
#vulnerability type THREAD_SAFETY_VIOLATION
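Note: the race concerns the shared async executor on the client-side-buffering path; the patch routes buffered writes through bulkMutation.add(...) instead and narrows the catch to RuntimeException, so the InterruptedException plumbing disappears. One obligation this creates (a sketch; flush() is an assumption about the buffering object, it is not shown in this row):

    if (clientSideBuffering && bulkMutation != null) {
        bulkMutation.flush(); // drain buffered mutations before shutdown or they are lost
    }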
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
synchronized (CONNECTION_LOCK) {
this.currentTable = connection.getTable(tName);
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = connection.getBufferedMutator(p);
}
}
}
|
#vulnerable code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
this.currentTable = this.connection.getTable(tName);
// suggestions from
// http://ryantwopointoh.blogspot.com/2009/01/
// performance-of-hbase-importing.html
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = this.connection.getBufferedMutator(p);
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
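Note: here the fix is the lock itself: every creation of a Table or BufferedMutator from the shared Connection is serialized behind one monitor, so the two fields are never observed half-updated. The declaration this presumes (assumed, since it sits outside the snippet):

    private static final Object CONNECTION_LOCK = new Object();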
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int insert(String table, String key,
HashMap<String, ByteIterator> values) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document toInsert = new Document("_id", key);
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
toInsert.put(entry.getKey(), entry.getValue().toArray());
}
bulkInserts.add(toInsert);
if (bulkInserts.size() == batchSize) {
collection.insertMany(bulkInserts, INSERT_MANY_OPTIONS);
bulkInserts.clear();
}
return 0;
}
catch (Exception e) {
System.err.println("Exception while trying bulk insert with "
+ bulkInserts.size());
e.printStackTrace();
return 1;
}
}
|
#vulnerable code
@Override
public int insert(String table, String key,
HashMap<String, ByteIterator> values) {
try {
MongoCollection<Document> collection = database
.getCollection(table);
Document toInsert = new Document("_id", key);
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
toInsert.put(entry.getKey(), entry.getValue().toArray());
}
bulkInserts.add(toInsert);
if (bulkInserts.size() == batchSize) {
collection.withWriteConcern(writeConcern)
.insertMany(bulkInserts, INSERT_MANY_OPTIONS);
bulkInserts.clear();
}
return 0;
}
catch (Exception e) {
System.err.println("Exception while trying bulk insert with "
+ bulkInserts.size());
e.printStackTrace();
return 1;
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
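Note: MongoCollection.withWriteConcern returns a new immutable handle, so deriving it per batch both allocates needlessly and, most likely what the checker objects to, re-reads the shared writeConcern field from many threads. The usual shape is to bind the concern once at init time (sketch; the collection name is assumed):

    MongoCollection<Document> usertable = database
        .getCollection("usertable")
        .withWriteConcern(writeConcern); // immutable handle, safe to reuse across threads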
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
this.currentTable = connection.getTable(tName);
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = connection.getBufferedMutator(p);
}
}
|
#vulnerable code
public void getHTable(String table) throws IOException {
final TableName tName = TableName.valueOf(table);
synchronized (CONNECTION_LOCK) {
this.currentTable = connection.getTable(tName);
if (clientSideBuffering) {
final BufferedMutatorParams p = new BufferedMutatorParams(tName);
p.writeBufferSize(writeBufferSize);
this.bufferedMutator = connection.getBufferedMutator(p);
}
}
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
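Note: this row goes the opposite way from the earlier getHTable pair: the monitor is removed. That is sound only under the documented HBase contract that Connection is thread-safe to share while Table and BufferedMutator are cheap, per-thread objects; if any other code still mutates the same fields unlocked, the earlier finding returns. Per-thread usage under that contract:

    try (Table t = connection.getTable(TableName.valueOf("usertable"))) {
        // this thread's reads and writes; the Table instance itself is never shared
    }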
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void writeTo( Writer writer, WriterConfig config ) throws IOException {
if( writer == null ) {
throw new NullPointerException( "writer is null" );
}
if( config == null ) {
throw new NullPointerException( "config is null" );
}
WritingBuffer buffer = new WritingBuffer( writer, 128 );
write( config.createWriter( buffer ) );
buffer.flush();
}
|
#vulnerable code
public void writeTo( Writer writer, WriterConfig config ) throws IOException {
WritingBuffer buffer = new WritingBuffer( writer, 128 );
write( config == null ? new JsonWriter( buffer ) : config.createWriter( buffer ) );
buffer.flush();
}
#location 4
#vulnerability type RESOURCE_LEAK
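Note: the leak is reported because the WritingBuffer is allocated before anything that can throw; with a null writer or config the NPE surfaces after allocation and flush() is never reached. The patch validates both arguments first and also makes config mandatory, dropping the implicit JsonWriter fallback. The same fail-fast contract via the JDK helper (message text assumed):

    java.util.Objects.requireNonNull(writer, "writer is null");
    java.util.Objects.requireNonNull(config, "config is null");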
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
@CacheEvict(value={"metaCaches","metaCache"},allEntries=true,beforeInvocation=true)
public void saveMeta(String type, String name, Integer mid) {
if (StringUtils.isNotBlank(type) && StringUtils.isNotBlank(name)){
MetaCond metaCond = new MetaCond();
metaCond.setName(name);
metaCond.setType(type);
List<MetaDomain> metas = metaDao.getMetasByCond(metaCond);
if (null == metas || metas.size() == 0){
MetaDomain metaDomain = new MetaDomain();
metaDomain.setName(name);
if (null != mid){
MetaDomain meta = metaDao.getMetaById(mid);
if (null != meta)
metaDomain.setMid(mid);
metaDao.updateMeta(metaDomain);
                //update the category on existing articles
if(meta !=null) {
contentService.updateCategory(meta.getName(), name);
}
} else {
metaDomain.setType(type);
metaDao.addMeta(metaDomain);
}
} else {
throw BusinessException.withErrorCode(ErrorConstant.Meta.META_IS_EXIST);
}
}
}
|
#vulnerable code
@Override
@CacheEvict(value={"metaCaches","metaCache"},allEntries=true,beforeInvocation=true)
public void saveMeta(String type, String name, Integer mid) {
if (StringUtils.isNotBlank(type) && StringUtils.isNotBlank(name)){
MetaCond metaCond = new MetaCond();
metaCond.setName(name);
metaCond.setType(type);
List<MetaDomain> metas = metaDao.getMetasByCond(metaCond);
if (null == metas || metas.size() == 0){
MetaDomain metaDomain = new MetaDomain();
metaDomain.setName(name);
if (null != mid){
MetaDomain meta = metaDao.getMetaById(mid);
if (null != meta)
metaDomain.setMid(mid);
metaDao.updateMeta(metaDomain);
                //update the category on existing articles
contentService.updateCategory(meta.getName(), name);
} else {
metaDomain.setType(type);
metaDao.addMeta(metaDomain);
}
} else {
throw BusinessException.withErrorCode(ErrorConstant.Meta.META_IS_EXIST);
}
}
}
#location 19
#vulnerability type NULL_DEREFERENCE
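Note: getMetaById can return null for a stale mid; the original guarded setMid(mid) against null but then dereferenced meta.getName() unconditionally on the next statement. The patch simply repeats the null check around the updateCategory call. The same guard expressed with Optional, purely illustrative (requires java.util.Optional):

    Optional.ofNullable(metaDao.getMetaById(mid))
            .map(MetaDomain::getName)
            .ifPresent(oldName -> contentService.updateCategory(oldName, name));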
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String indexDataset(Dataset dataset) {
logger.info("indexing dataset " + dataset.getId());
/**
* @todo should we use solrDocIdentifierDataset or
* IndexableObject.IndexableTypes.DATASET.getName() + "_" ?
*/
// String solrIdPublished = solrDocIdentifierDataset + dataset.getId();
String solrIdPublished = determinePublishedDatasetSolrDocId(dataset);
String solrIdDraftDataset = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.WORKING_COPY.getSuffix();
// String solrIdDeaccessioned = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.DEACCESSIONED.getSuffix();
String solrIdDeaccessioned = determineDeaccesionedDatasetId(dataset);
StringBuilder debug = new StringBuilder();
debug.append("\ndebug:\n");
int numReleasedVersions = 0;
List<DatasetVersion> versions = dataset.getVersions();
for (DatasetVersion datasetVersion : versions) {
Long versionDatabaseId = datasetVersion.getId();
String versionTitle = datasetVersion.getTitle();
String semanticVersion = datasetVersion.getSemanticVersion();
DatasetVersion.VersionState versionState = datasetVersion.getVersionState();
if (versionState.equals(DatasetVersion.VersionState.RELEASED)) {
/**
* @todo for performance, should just query this rather than
* iterating. Would need a new SQL query/method
*/
numReleasedVersions += 1;
}
debug.append("version found with database id " + versionDatabaseId + "\n");
debug.append("- title: " + versionTitle + "\n");
debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n");
List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas();
List<String> fileInfo = new ArrayList<>();
for (FileMetadata fileMetadata : fileMetadatas) {
fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel());
}
int numFiles = 0;
if (fileMetadatas != null) {
numFiles = fileMetadatas.size();
}
debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n");
}
DatasetVersion latestVersion = dataset.getLatestVersion();
String latestVersionStateString = latestVersion.getVersionState().name();
DatasetVersion.VersionState latestVersionState = latestVersion.getVersionState();
DatasetVersion releasedVersion = dataset.getReleasedVersion();
if (releasedVersion != null) {
if (releasedVersion.getVersionState().equals(DatasetVersion.VersionState.DEACCESSIONED)) {
DatasetVersion lookupAttempt2 = releasedVersion.getMostRecentlyReleasedVersion();
String message = "WARNING: called dataset.getReleasedVersion() but version returned was deaccessioned (database id "
+ releasedVersion.getId()
+ "). (releasedVersion.getMostRecentlyReleasedVersion() returns database id "
+ lookupAttempt2.getId() + " so that method may be better?). Look out for strange indexing results.";
logger.severe(message);
debug.append(message);
}
}
Map<DatasetVersion.VersionState, Boolean> desiredCards = new LinkedHashMap<>();
/**
* @todo refactor all of this below and have a single method that takes
* the map of desired cards (which correspond to Solr documents) as one
* of the arguments and does all the operations necessary to achieve the
* desired state.
*/
StringBuilder results = new StringBuilder();
if (numReleasedVersions == 0) {
results.append("No published version, nothing will be indexed as ")
.append(solrIdPublished).append("\n");
if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
desiredCards.put(DatasetVersion.VersionState.DRAFT, true);
IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion);
String indexDraftResult = addOrUpdateDataset(indexableDraftVersion);
results.append("The latest version is a working copy (latestVersionState: ")
.append(latestVersionStateString).append(") and indexing was attempted for ")
.append(solrIdDraftDataset).append(" (limited discoverability). Result: ")
.append(indexDraftResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("Draft exists, no need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(" (and files). Result: ").append(deleteDeaccessionedResult);
desiredCards.put(DatasetVersion.VersionState.RELEASED, false);
/**
* @todo delete published?
*/
/**
* Desired state for existence of cards: {DRAFT=true,
* DEACCESSIONED=false, RELEASED=false}
*
* No published version, nothing will be indexed as dataset_17
*
* The latest version is a working copy (latestVersionState:
* DRAFT) and indexing was attempted for dataset_17_draft
* (limited discoverability). Result: indexed dataset 17 as
* dataset_17_draft. filesIndexed: [datafile_18_draft]
*
* Draft exists, no need for deaccessioned version. Deletion
* attempted for dataset_17_deaccessioned (and files). Result:
* Attempted to delete dataset_17_deaccessioned from Solr index.
* updateReponse was:
* {responseHeader={status=0,QTime=0}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else if (latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, true);
IndexableDataset indexableDeaccessionedVersion = new IndexableDataset(latestVersion);
String indexDeaccessionedVersionResult = addOrUpdateDataset(indexableDeaccessionedVersion);
results.append("No draft version. Attempting to index as deaccessioned. Result: ").append(indexDeaccessionedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.RELEASED, false);
String deletePublishedResults = removePublished(dataset);
results.append("No published version. Attempting to delete traces of published version from index. Result: ").append(deletePublishedResults);
desiredCards.put(DatasetVersion.VersionState.DRAFT, false);
/**
* @todo delete drafts?
*/
/**
* Desired state for existence of cards: {DEACCESSIONED=true,
* RELEASED=false, DRAFT=false}
*
* No published version, nothing will be indexed as dataset_17
*
* No draft version. Attempting to index as deaccessioned.
* Result: indexed dataset 17 as dataset_17_deaccessioned.
* filesIndexed: []
*
* No published version. Attempting to delete traces of
* published version from index. Result: Attempted to delete
* dataset_17 from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18 from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else {
return "No-op. Unexpected condition reached: No released version and latest version is neither draft nor deaccesioned";
}
} else if (numReleasedVersions > 0) {
results.append("Released versions found: ").append(numReleasedVersions)
.append(". Will attempt to index as ").append(solrIdPublished).append(" (discoverable by anonymous)\n");
if (latestVersionState.equals(DatasetVersion.VersionState.RELEASED)
|| latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
desiredCards.put(DatasetVersion.VersionState.RELEASED, true);
IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion);
String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion);
results.append("Attempted to index " + solrIdPublished).append(". Result: ").append(indexReleasedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DRAFT, false);
List<String> solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset);
String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset);
StringBuilder deleteDraftFilesResults = new StringBuilder();
for (String doomed : solrDocIdsForDraftFilesToDelete) {
String result = removeSolrDocFromIndex(doomed);
deleteDraftFilesResults.append(result);
}
results.append("The latest version is published. Attempting to delete drafts. Result: ")
.append(deleteDraftDatasetVersionResult).append(deleteDraftFilesResults).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("No need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult);
/**
* Desired state for existence of cards: {RELEASED=true,
* DRAFT=false, DEACCESSIONED=false}
*
* Released versions found: 1. Will attempt to index as
* dataset_17 (discoverable by anonymous)
*
* Attempted to index dataset_17. Result: indexed dataset 17 as
* dataset_17. filesIndexed: [datafile_18]
*
* The latest version is published. Attempting to delete drafts.
* Result: Attempted to delete dataset_17_draft from Solr index.
* updateReponse was: {responseHeader={status=0,QTime=1}}
*
* No need for deaccessioned version. Deletion attempted for
* dataset_17_deaccessioned. Result: Attempted to delete
* dataset_17_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion);
desiredCards.put(DatasetVersion.VersionState.DRAFT, true);
String indexDraftResult = addOrUpdateDataset(indexableDraftVersion);
results.append("The latest version is a working copy (latestVersionState: ")
.append(latestVersionStateString).append(") and will be indexed as ")
.append(solrIdDraftDataset).append(" (limited visibility). Result: ").append(indexDraftResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.RELEASED, true);
IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion);
String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion);
results.append("There is a published version we will attempt to index. Result: ").append(indexReleasedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("No need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult);
/**
* Desired state for existence of cards: {DRAFT=true,
* RELEASED=true, DEACCESSIONED=false}
*
* Released versions found: 1. Will attempt to index as
* dataset_17 (discoverable by anonymous)
*
* The latest version is a working copy (latestVersionState:
* DRAFT) and will be indexed as dataset_17_draft (limited
* visibility). Result: indexed dataset 17 as dataset_17_draft.
* filesIndexed: [datafile_18_draft]
*
* There is a published version we will attempt to index.
* Result: indexed dataset 17 as dataset_17. filesIndexed:
* [datafile_18]
*
* No need for deaccessioned version. Deletion attempted for
* dataset_17_deaccessioned. Result: Attempted to delete
* dataset_17_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else {
return "No-op. Unexpected condition reached: There is at least one published version but the latest version is neither published nor draft";
}
} else {
return "No-op. Unexpected condition reached: Negative number of released versions? Count was: " + numReleasedVersions;
}
}
|
#vulnerable code
public String indexDataset(Dataset dataset) {
logger.info("indexing dataset " + dataset.getId());
/**
* @todo should we use solrDocIdentifierDataset or
* IndexableObject.IndexableTypes.DATASET.getName() + "_" ?
*/
// String solrIdPublished = solrDocIdentifierDataset + dataset.getId();
String solrIdPublished = determinePublishedDatasetSolrDocId(dataset);
String solrIdDraftDataset = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.WORKING_COPY.getSuffix();
// String solrIdDeaccessioned = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.DEACCESSIONED.getSuffix();
String solrIdDeaccessioned = determineDeaccesionedDatasetId(dataset);
StringBuilder debug = new StringBuilder();
debug.append("\ndebug:\n");
int numReleasedVersions = 0;
List<DatasetVersion> versions = dataset.getVersions();
for (DatasetVersion datasetVersion : versions) {
Long versionDatabaseId = datasetVersion.getId();
String versionTitle = datasetVersion.getTitle();
String semanticVersion = datasetVersion.getSemanticVersion();
DatasetVersion.VersionState versionState = datasetVersion.getVersionState();
if (versionState.equals(DatasetVersion.VersionState.RELEASED)) {
/**
* @todo for performance, should just query this rather than
* iterating. Would need a new SQL query/method
*/
numReleasedVersions += 1;
}
debug.append("version found with database id " + versionDatabaseId + "\n");
debug.append("- title: " + versionTitle + "\n");
debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n");
List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas();
List<String> fileInfo = new ArrayList<>();
for (FileMetadata fileMetadata : fileMetadatas) {
fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel());
}
int numFiles = 0;
if (fileMetadatas != null) {
numFiles = fileMetadatas.size();
}
debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n");
}
DatasetVersion latestVersion = dataset.getLatestVersion();
String latestVersionStateString = latestVersion.getVersionState().name();
DatasetVersion.VersionState latestVersionState = latestVersion.getVersionState();
DatasetVersion releasedVersion = dataset.getReleasedVersion();
if (releasedVersion.getVersionState().equals(DatasetVersion.VersionState.DEACCESSIONED)) {
logger.severe("WARNING: called dataset.getReleasedVersion() but version returned was deaccessioned. Look out for strange indexing results.");
}
Map<DatasetVersion.VersionState, Boolean> desiredCards = new LinkedHashMap<>();
/**
* @todo refactor all of this below and have a single method that takes
* the map of desired cards (which correspond to Solr documents) as one
* of the arguments and does all the operations necessary to achieve the
* desired state.
*/
StringBuilder results = new StringBuilder();
if (numReleasedVersions == 0) {
results.append("No published version, nothing will be indexed as ")
.append(solrIdPublished).append("\n");
if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
desiredCards.put(DatasetVersion.VersionState.DRAFT, true);
IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion);
String indexDraftResult = addOrUpdateDataset(indexableDraftVersion);
results.append("The latest version is a working copy (latestVersionState: ")
.append(latestVersionStateString).append(") and indexing was attempted for ")
.append(solrIdDraftDataset).append(" (limited discoverability). Result: ")
.append(indexDraftResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("Draft exists, no need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(" (and files). Result: ").append(deleteDeaccessionedResult);
desiredCards.put(DatasetVersion.VersionState.RELEASED, false);
/**
* @todo delete published?
*/
/**
* Desired state for existence of cards: {DRAFT=true,
* DEACCESSIONED=false, RELEASED=false}
*
* No published version, nothing will be indexed as dataset_17
*
* The latest version is a working copy (latestVersionState:
* DRAFT) and indexing was attempted for dataset_17_draft
* (limited discoverability). Result: indexed dataset 17 as
* dataset_17_draft. filesIndexed: [datafile_18_draft]
*
* Draft exists, no need for deaccessioned version. Deletion
* attempted for dataset_17_deaccessioned (and files). Result:
* Attempted to delete dataset_17_deaccessioned from Solr index.
* updateReponse was:
* {responseHeader={status=0,QTime=0}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else if (latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, true);
IndexableDataset indexableDeaccessionedVersion = new IndexableDataset(latestVersion);
String indexDeaccessionedVersionResult = addOrUpdateDataset(indexableDeaccessionedVersion);
results.append("No draft version. Attempting to index as deaccessioned. Result: ").append(indexDeaccessionedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.RELEASED, false);
String deletePublishedResults = removePublished(dataset);
results.append("No published version. Attempting to delete traces of published version from index. Result: ").append(deletePublishedResults);
desiredCards.put(DatasetVersion.VersionState.DRAFT, false);
/**
* @todo delete drafts?
*/
/**
* Desired state for existence of cards: {DEACCESSIONED=true,
* RELEASED=false, DRAFT=false}
*
* No published version, nothing will be indexed as dataset_17
*
* No draft version. Attempting to index as deaccessioned.
* Result: indexed dataset 17 as dataset_17_deaccessioned.
* filesIndexed: []
*
* No published version. Attempting to delete traces of
* published version from index. Result: Attempted to delete
* dataset_17 from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18 from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else {
return "No-op. Unexpected condition reached: No released version and latest version is neither draft nor deaccesioned";
}
} else if (numReleasedVersions > 0) {
results.append("Released versions found: ").append(numReleasedVersions)
.append(". Will attempt to index as ").append(solrIdPublished).append(" (discoverable by anonymous)\n");
if (latestVersionState.equals(DatasetVersion.VersionState.RELEASED)
|| latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
desiredCards.put(DatasetVersion.VersionState.RELEASED, true);
IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion);
String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion);
results.append("Attempted to index " + solrIdPublished).append(". Result: ").append(indexReleasedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DRAFT, false);
List<String> solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset);
String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset);
StringBuilder deleteDraftFilesResults = new StringBuilder();
for (String doomed : solrDocIdsForDraftFilesToDelete) {
String result = removeSolrDocFromIndex(doomed);
deleteDraftFilesResults.append(result);
}
results.append("The latest version is published. Attempting to delete drafts. Result: ")
.append(deleteDraftDatasetVersionResult).append(deleteDraftFilesResults).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("No need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult);
/**
* Desired state for existence of cards: {RELEASED=true,
* DRAFT=false, DEACCESSIONED=false}
*
* Released versions found: 1. Will attempt to index as
* dataset_17 (discoverable by anonymous)
*
* Attempted to index dataset_17. Result: indexed dataset 17 as
* dataset_17. filesIndexed: [datafile_18]
*
* The latest version is published. Attempting to delete drafts.
* Result: Attempted to delete dataset_17_draft from Solr index.
* updateReponse was: {responseHeader={status=0,QTime=1}}
*
* No need for deaccessioned version. Deletion attempted for
* dataset_17_deaccessioned. Result: Attempted to delete
* dataset_17_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion);
desiredCards.put(DatasetVersion.VersionState.DRAFT, true);
String indexDraftResult = addOrUpdateDataset(indexableDraftVersion);
results.append("The latest version is a working copy (latestVersionState: ")
.append(latestVersionStateString).append(") and will be indexed as ")
.append(solrIdDraftDataset).append(" (limited visibility). Result: ").append(indexDraftResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.RELEASED, true);
IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion);
String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion);
results.append("There is a published version we will attempt to index. Result: ").append(indexReleasedVersionResult).append("\n");
desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false);
String deleteDeaccessionedResult = removeDeaccessioned(dataset);
results.append("No need for deaccessioned version. Deletion attempted for ")
.append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult);
/**
* Desired state for existence of cards: {DRAFT=true,
* RELEASED=true, DEACCESSIONED=false}
*
* Released versions found: 1. Will attempt to index as
* dataset_17 (discoverable by anonymous)
*
* The latest version is a working copy (latestVersionState:
* DRAFT) and will be indexed as dataset_17_draft (limited
* visibility). Result: indexed dataset 17 as dataset_17_draft.
* filesIndexed: [datafile_18_draft]
*
* There is a published version we will attempt to index.
* Result: indexed dataset 17 as dataset_17. filesIndexed:
* [datafile_18]
*
* No need for deaccessioned version. Deletion attempted for
* dataset_17_deaccessioned. Result: Attempted to delete
* dataset_17_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=1}}Attempted to delete
* datafile_18_deaccessioned from Solr index. updateReponse was:
* {responseHeader={status=0,QTime=0}}
*/
String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
logger.info(result);
return result;
} else {
return "No-op. Unexpected condition reached: There is at least one published version but the latest version is neither published nor draft";
}
} else {
return "No-op. Unexpected condition reached: Negative number of released versions? Count was: " + numReleasedVersions;
}
}
#location 46
#vulnerability type NULL_DEREFERENCE
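Note: dataset.getReleasedVersion() is null until a dataset is first published, so the unconditional getVersionState() call is the reported dereference. The patch nests the deaccessioned check under a null guard and enriches the warning with getMostRecentlyReleasedVersion(). The essential shape:

    DatasetVersion releasedVersion = dataset.getReleasedVersion(); // null before first publish
    if (releasedVersion != null
            && releasedVersion.getVersionState().equals(DatasetVersion.VersionState.DEACCESSIONED)) {
        // log the anomaly, as in the fixed method above
    }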
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void init() {
if (dataset.getId() != null) { // view mode for a dataset
dataset = datasetService.find(dataset.getId());
editVersion = dataset.getLatestVersion();
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
ownerId = dataset.getOwner().getId();
} else if (ownerId != null) { // create mode for a new child dataset
editMode = EditMode.CREATE;
dataset.setOwner(dataverseService.find(ownerId));
dataset.setVersions(new ArrayList());
editVersion.setDataset(dataset);
editVersion.setFileMetadatas(new ArrayList());
editVersion.setDatasetFieldValues(null);
editVersion.setVersionState(VersionState.DRAFT);
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editVersion.setVersionNumber(new Long(1));
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
dataset.getVersions().add(editVersion);
} else {
throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling
}
setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId));
setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId));
}
|
#vulnerable code
public void init() {
if (dataset.getId() != null) { // view mode for a dataset
dataset = datasetService.find(dataset.getId());
editVersion = dataset.getLatestVersion();
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
ownerId = dataset.getOwner().getId();
} else if (ownerId != null) { // create mode for a new child dataset
editMode = EditMode.CREATE;
dataset.setOwner(dataverseService.find(ownerId));
dataset.setVersions(new ArrayList());
editVersion.setDataset(dataset);
editVersion.setFileMetadatas(new ArrayList());
editVersion.setDatasetFieldValues(null);
editVersion.setVersionState(VersionState.DRAFT);
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editVersion.setVersionNumber(new Long(1));
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
dataset.getVersions().add(editVersion);
} else {
throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling
}
setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId));
setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId));
}
#location 6
#vulnerability type NULL_DEREFERENCE
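Note: in the view branch the loaded version's field values were read before ever being initialized, so editValues came back null downstream. The patch initializes them explicitly before the read. A defensive variant, assuming initDatasetFieldValues() is safe to call on an already-populated version (idempotence is an assumption, not shown here):

    if (editVersion.getDatasetFieldValues() == null) {
        editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
    }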
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String releaseDraft() {
if (releaseRadio == 1) {
return releaseDataset(false);
} else {
return releaseDataset(true);
}
}
|
#vulnerable code
public String releaseDraft() {
if (releaseRadio == 1) {
dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue() + 1));
dataset.getEditVersion().setMinorVersionNumber(new Long(0));
} else {
dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue()));
dataset.getEditVersion().setMinorVersionNumber(new Long(dataset.getReleasedVersion().getMinorVersionNumber().intValue() + 1));
}
return releaseDataset(false);
}
#location 6
#vulnerability type NULL_DEREFERENCE
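Note: on a first-time release getReleasedVersion() is null, so the version arithmetic dereferences null. The patch moves the numbering decision into releaseDataset(boolean), whose flag presumably selects minor versioning and handles the no-prior-release case internally. A null-tolerant form of the removed arithmetic, assuming first releases start at 1.0:

    DatasetVersion prior = dataset.getReleasedVersion();
    long major = (prior == null) ? 1L : prior.getVersionNumber() + 1;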
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void init() {
if (dataset.getId() != null) { // view mode for a dataset
dataset = datasetService.find(dataset.getId());
editVersion = dataset.getLatestVersion();
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
ownerId = dataset.getOwner().getId();
} else if (ownerId != null) { // create mode for a new child dataset
editMode = EditMode.CREATE;
dataset.setOwner(dataverseService.find(ownerId));
dataset.setVersions(new ArrayList());
editVersion.setDataset(dataset);
editVersion.setFileMetadatas(new ArrayList());
editVersion.setDatasetFieldValues(null);
editVersion.setVersionState(VersionState.DRAFT);
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editVersion.setVersionNumber(new Long(1));
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
dataset.getVersions().add(editVersion);
} else {
throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling
}
setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId));
setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId));
}
|
#vulnerable code
public void init() {
if (dataset.getId() != null) { // view mode for a dataset
dataset = datasetService.find(dataset.getId());
editVersion = dataset.getLatestVersion();
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
ownerId = dataset.getOwner().getId();
} else if (ownerId != null) { // create mode for a new child dataset
editMode = EditMode.CREATE;
dataset.setOwner(dataverseService.find(ownerId));
dataset.setVersions(new ArrayList());
editVersion.setDataset(dataset);
editVersion.setFileMetadatas(new ArrayList());
editVersion.setDatasetFieldValues(null);
editVersion.setVersionState(VersionState.DRAFT);
editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues());
editVersion.setVersionNumber(new Long(1));
editValues = editVersion.getDatasetFieldValues();
citationValues = extractValues(editValues, true);
otherMetadataValues = extractValues(editValues, false);
dataset.getVersions().add(editVersion);
} else {
throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling
}
setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId));
setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId));
}
#location 8
#vulnerability type NULL_DEREFERENCE
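Note: this pair repeats the init() pair above with only the reported #location changed (8 instead of 6); the initialize-before-read discussion there applies unchanged.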
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String releaseDraft() {
if (releaseRadio == 1) {
return releaseDataset(false);
} else {
return releaseDataset(true);
}
}
|
#vulnerable code
public String releaseDraft() {
if (releaseRadio == 1) {
dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue() + 1));
dataset.getEditVersion().setMinorVersionNumber(new Long(0));
} else {
dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue()));
dataset.getEditVersion().setMinorVersionNumber(new Long(dataset.getReleasedVersion().getMinorVersionNumber().intValue() + 1));
}
return releaseDataset(false);
}
#location 3
#vulnerability type NULL_DEREFERENCE
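Note: likewise a repeat of the earlier releaseDraft pair at a different #location (3 instead of 6); the first-release null guard applies here too.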
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void deleteContainer(String uri, AuthCredentials authCredentials, SwordConfiguration sc) throws SwordError, SwordServerException, SwordAuthException {
// swordConfiguration = (SwordConfigurationImpl) sc;
DataverseUser vdcUser = swordAuth.auth(authCredentials);
logger.fine("deleteContainer called with url: " + uri);
urlManager.processUrl(uri);
logger.fine("original url: " + urlManager.getOriginalUrl());
if (!"edit".equals(urlManager.getServlet())) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "edit servlet expected, not " + urlManager.getServlet());
}
String targetType = urlManager.getTargetType();
if (!targetType.isEmpty()) {
logger.fine("operating on target type: " + urlManager.getTargetType());
// StudyServiceLocal studyService;
Context ctx;
try {
ctx = new InitialContext();
// studyService = (StudyServiceLocal) ctx.lookup("java:comp/env/studyService");
} catch (NamingException ex) {
logger.info("problem looking up studyService");
throw new SwordServerException("problem looking up studyService");
}
if ("dataverse".equals(targetType)) {
/**
* @todo throw SWORD error recommending use of 4.0 "native" API
* to delete dataverses
*/
// String dvAlias = urlManager.getTargetIdentifier();
// List<VDC> userVDCs = vdcService.getUserVDCs(vdcUser.getId());
// VDC dataverseToEmpty = vdcService.findByAlias(dvAlias);
// if (dataverseToEmpty != null) {
// if ("Admin".equals(vdcUser.getNetworkRole().getName())) {
// if (swordConfiguration.allowNetworkAdminDeleteAllStudies()) {
//
// /**
// * @todo: this is the deleteContainer method...
// * should move this to some sort of "emptyContainer"
// * method
// */
// // curl --insecure -s -X DELETE https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/edit/dataverse/sword
// Collection<Study> studies = dataverseToEmpty.getOwnedStudies();
// for (Study study : studies) {
// logger.info("In dataverse " + dataverseToEmpty.getAlias() + " about to delete study id " + study.getId());
// studyService.deleteStudy(study.getId());
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "DELETE on a dataverse is not supported");
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Role was " + vdcUser.getNetworkRole().getName() + " but admin required.");
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't find dataverse to delete from URL: " + uri);
// }
} else if ("study".equals(targetType)) {
String globalId = urlManager.getTargetIdentifier();
logger.info("globalId: " + globalId);
if (globalId != null) {
Dataset study = null;
try {
study = datasetService.findByGlobalId(globalId);
} catch (EJBException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri);
}
if (study != null) {
Dataverse dvThatOwnsStudy = study.getOwner();
if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) {
DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState();
if (studyState.equals(DatasetVersion.VersionState.DRAFT)) {
logger.info("destroying working copy version of study " + study.getGlobalId());
/**
* @todo in DVN 3.x we had a convenient
* destroyWorkingCopyVersion method but the
* DeleteDatasetCommand is pretty scary... what
* if a released study has a new draft version?
* What we need is a
* DeleteDatasetVersionCommand, I suppose...
*/
// studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId());
try {
engineSvc.submit(new DeleteDatasetCommand(study, vdcUser));
/**
* @todo re-index after deletion
* https://redmine.hmdc.harvard.edu/issues/3544#note-21
*/
logger.info("dataset deleted");
} catch (CommandExecutionException ex) {
// internal error
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage());
} catch (CommandException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage());
}
/**
* @todo think about how to handle non-drafts
*/
} else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) {
// logger.fine("deaccessioning latest version of study " + study.getGlobalId());
// studyService.deaccessionStudy(study.getLatestVersion());
} else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has already been deaccessioned.");
} else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned.");
} else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned.");
} else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias());
}
} else {
throw new SwordError(404);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study to delete from URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported delete target in URL:" + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target for deletion specified");
}
}
|
#vulnerable code
@Override
public void deleteContainer(String uri, AuthCredentials authCredentials, SwordConfiguration sc) throws SwordError, SwordServerException, SwordAuthException {
// swordConfiguration = (SwordConfigurationImpl) sc;
DataverseUser vdcUser = swordAuth.auth(authCredentials);
logger.fine("deleteContainer called with url: " + uri);
urlManager.processUrl(uri);
logger.fine("original url: " + urlManager.getOriginalUrl());
if (!"edit".equals(urlManager.getServlet())) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "edit servlet expected, not " + urlManager.getServlet());
}
String targetType = urlManager.getTargetType();
if (!targetType.isEmpty()) {
logger.fine("operating on target type: " + urlManager.getTargetType());
// StudyServiceLocal studyService;
Context ctx;
try {
ctx = new InitialContext();
// studyService = (StudyServiceLocal) ctx.lookup("java:comp/env/studyService");
} catch (NamingException ex) {
logger.info("problem looking up studyService");
throw new SwordServerException("problem looking up studyService");
}
if ("dataverse".equals(targetType)) {
/**
* @todo throw SWORD error recommending use of 4.0 "native" API
* to delete dataverses
*/
// String dvAlias = urlManager.getTargetIdentifier();
// List<VDC> userVDCs = vdcService.getUserVDCs(vdcUser.getId());
// VDC dataverseToEmpty = vdcService.findByAlias(dvAlias);
// if (dataverseToEmpty != null) {
// if ("Admin".equals(vdcUser.getNetworkRole().getName())) {
// if (swordConfiguration.allowNetworkAdminDeleteAllStudies()) {
//
// /**
// * @todo: this is the deleteContainer method...
// * should move this to some sort of "emptyContainer"
// * method
// */
// // curl --insecure -s -X DELETE https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/edit/dataverse/sword
// Collection<Study> studies = dataverseToEmpty.getOwnedStudies();
// for (Study study : studies) {
// logger.info("In dataverse " + dataverseToEmpty.getAlias() + " about to delete study id " + study.getId());
// studyService.deleteStudy(study.getId());
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "DELETE on a dataverse is not supported");
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Role was " + vdcUser.getNetworkRole().getName() + " but admin required.");
// }
// } else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't find dataverse to delete from URL: " + uri);
// }
} else if ("study".equals(targetType)) {
String globalId = urlManager.getTargetIdentifier();
logger.info("globalId: " + globalId);
if (globalId != null) {
Dataset study = null;
try {
/**
* @todo don't hard code this, obviously. In DVN 3.x we
* had a method for
* studyService.getStudyByGlobalId(globalId)
*/
// study = studyService.getStudyByGlobalId(globalId);
long databaseIdForRoastingAtHomeDataset = 10;
study = datasetService.find(databaseIdForRoastingAtHomeDataset);
} catch (EJBException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri);
}
if (study != null) {
Dataverse dvThatOwnsStudy = study.getOwner();
if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) {
DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState();
if (studyState.equals(DatasetVersion.VersionState.DRAFT)) {
/**
* @todo use getGlobalId when it's available
*/
logger.info("destroying working copy version of study " + study.getIdentifier());
/**
* @todo in DVN 3.x we had a convenient
* destroyWorkingCopyVersion method but the
* DeleteDatasetCommand is pretty scary... what
* if a released study has a new draft version?
* What we need is a
* DeleteDatasetVersionCommand, I suppose...
*/
// studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId());
try {
engineSvc.submit(new DeleteDatasetCommand(study, vdcUser));
/**
* @todo re-index after deletion
* https://redmine.hmdc.harvard.edu/issues/3544#note-21
*/
logger.info("dataset deleted");
} catch (CommandExecutionException ex) {
// internal error
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage());
} catch (CommandException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage());
}
/**
* @todo think about how to handle non-drafts
*/
} else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) {
// logger.fine("deaccessioning latest version of study " + study.getGlobalId());
// studyService.deaccessionStudy(study.getLatestVersion());
} else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has already been deaccessioned.");
} else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned.");
} else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned.");
} else {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias());
}
} else {
throw new SwordError(404);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study to delete from URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported delete target in URL:" + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target for deletion specified");
}
}
#location 78
#vulnerability type NULL_DEREFERENCE
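Note: besides the reported dereference, the patch replaces the hard-coded database id 10 with a real lookup by global id and switches the log call from getIdentifier() to getGlobalId(). The lookup contract it relies on, as handled in the fixed method:

    Dataset study = datasetService.findByGlobalId(globalId); // null when nothing matches
    if (study == null) {
        throw new SwordError(404);
    }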
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String indexDataset(Dataset dataset) {
logger.info("indexing dataset " + dataset.getId());
String solrIdDraftStudy = "dataset_" + dataset.getId() + "_draft";
String solrIdPublishedStudy = "dataset_" + dataset.getId();
StringBuilder sb = new StringBuilder();
sb.append("rationale:\n");
List<DatasetVersion> versions = dataset.getVersions();
for (DatasetVersion datasetVersion : versions) {
Long versionDatabaseId = datasetVersion.getId();
String versionTitle = datasetVersion.getTitle();
String semanticVersion = datasetVersion.getSemanticVersion();
String versionState = datasetVersion.getVersionState().name();
boolean versionIsReleased = datasetVersion.isReleased();
boolean versionIsWorkingCopy = datasetVersion.isWorkingCopy();
sb.append("version found with database id " + versionDatabaseId + "\n");
sb.append("- title: " + versionTitle + "\n");
sb.append("- semanticVersion-STATE: " + semanticVersion + "-" + versionState + "\n");
sb.append("- isWorkingCopy: " + versionIsWorkingCopy + "\n");
sb.append("- isReleased: " + versionIsReleased + "\n");
}
DatasetVersion latestVersion = dataset.getLatestVersion();
String latestVersionState = latestVersion.getVersionState().name();
DatasetVersion releasedVersion = dataset.getReleasedVersion();
if (latestVersion.isWorkingCopy()) {
sb.append("The latest version is a working copy (latestVersionState: " + latestVersionState + ") and will be indexed as " + solrIdDraftStudy + " (only visible by creator)\n");
if (releasedVersion != null) {
String releasedVersionState = releasedVersion.getVersionState().name();
String semanticVersion = releasedVersion.getSemanticVersion();
sb.append("The released version is " + semanticVersion + " (releasedVersionState: " + releasedVersionState + ") and will be indexed as " + solrIdPublishedStudy + " (visible by anonymous)");
/**
* The latest version is a working copy (latestVersionState:
* DRAFT) and will be indexed as dataset_17_draft (only visible
* by creator)
*
* The released version is 1.0 (releasedVersionState: RELEASED)
* and will be indexed as dataset_17 (visible by anonymous)
*/
logger.info(sb.toString());
String indexDraftResult = indexDatasetAddOrUpdate(dataset);
String indexReleasedVersionResult = indexDatasetAddOrUpdate(dataset);
return "indexDraftResult:" + indexDraftResult + ", indexReleasedVersionResult:" + indexReleasedVersionResult + ", " + sb.toString();
} else {
sb.append("There is no released version yet so nothing will be indexed as " + solrIdPublishedStudy);
/**
* The latest version is a working copy (latestVersionState:
* DRAFT) and will be indexed as dataset_33_draft (only visible
* by creator)
*
* There is no released version yet so nothing will be indexed
* as dataset_33
*/
logger.info(sb.toString());
String indexDraftResult = indexDatasetAddOrUpdate(dataset);
return "indexDraftResult:" + indexDraftResult + ", " + sb.toString();
}
} else {
sb.append("The latest version is not a working copy (latestVersionState: " + latestVersionState + ") and will be indexed as " + solrIdPublishedStudy + " (visible by anonymous) and we will be deleting " + solrIdDraftStudy + "\n");
if (releasedVersion != null) {
String releasedVersionState = releasedVersion.getVersionState().name();
String semanticVersion = releasedVersion.getSemanticVersion();
sb.append("The released version is " + semanticVersion + " (releasedVersionState: " + releasedVersionState + ") and will be (again) indexed as " + solrIdPublishedStudy + " (visible by anonymous)");
/**
* The latest version is not a working copy (latestVersionState:
* RELEASED) and will be indexed as dataset_34 (visible by
* anonymous) and we will be deleting dataset_34_draft
*
* The released version is 1.0 (releasedVersionState: RELEASED)
* and will be (again) indexed as dataset_34 (visible by anonymous)
*/
logger.info(sb.toString());
String deleteDraftVersionResult = removeDatasetDraftFromIndex(solrIdDraftStudy);
String indexReleasedVersionResult = indexDatasetAddOrUpdate(dataset);
return "deleteDraftVersionResult: " + deleteDraftVersionResult + ", indexReleasedVersionResult:" + indexReleasedVersionResult + ", " + sb.toString();
} else {
sb.append("We don't ever expect to ever get here. Why is there no released version if the latest version is not a working copy? The latestVersionState is " + latestVersionState + " and we don't know what to do with it. Nothing will be added or deleted from the index.");
logger.info(sb.toString());
return sb.toString();
}
}
}
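Note: the branch logic in both versions of this method hangs off a two-document naming scheme in Solr, one card per visibility state of the same dataset:

    String solrIdDraft     = "dataset_" + dataset.getId() + "_draft"; // visible to the creator
    String solrIdPublished = "dataset_" + dataset.getId();           // visible to anonymous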
|
#vulnerable code
public String indexDataset(Dataset dataset) {
logger.info("indexing dataset " + dataset.getId());
Collection<SolrInputDocument> docs = new ArrayList<>();
List<String> dataversePathSegmentsAccumulator = new ArrayList<>();
List<String> dataverseSegments = new ArrayList<>();
try {
dataverseSegments = findPathSegments(dataset.getOwner(), dataversePathSegmentsAccumulator);
} catch (Exception ex) {
logger.info("failed to find dataverseSegments for dataversePaths for " + SearchFields.SUBTREE + ": " + ex);
}
List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments);
SolrInputDocument solrInputDocument = new SolrInputDocument();
solrInputDocument.addField(SearchFields.ID, "dataset_" + dataset.getId());
solrInputDocument.addField(SearchFields.ENTITY_ID, dataset.getId());
solrInputDocument.addField(SearchFields.TYPE, "datasets");
if (dataset.isReleased()) {
solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getPublicationDate());
solrInputDocument.addField(SearchFields.PERMS, publicGroupString);
} else if (dataset.getOwner().getCreator() != null) {
/**
* todo why is dataset.getCreateDate() null? For now I guess we'll
* use the createDate of its parent dataverse?! https://redmine.hmdc.harvard.edu/issues/3806
*/
// solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getCreateDate());
solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getOwner().getCreateDate());
solrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + dataset.getOwner().getCreator().getId());
/**
* @todo: replace this fake version of granting users access to
* dataverses with the real thing, when it's available in the app
*/
if (dataset.getOwner().getCreator().getUserName().equals("pete")) {
// figure out if cathy is around
DataverseUser cathy = dataverseUserServiceBean.findByUserName("cathy");
if (cathy != null) {
// let cathy see all of pete's dataverses
solrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + cathy.getId());
}
}
} else {
/**
* todo why is dataset.getCreateDate() null? For now I guess we'll
* use the createDate of its parent dataverse?! https://redmine.hmdc.harvard.edu/issues/3806
*/
// solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getCreateDate());
solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getOwner().getCreateDate());
/**
* @todo: remove this once everyone has dropped their database and
* won't get NPE's from dataverse.getCreator
*/
solrInputDocument.addField(SearchFields.PERMS, npeGetCreator);
}
/**
* @todo: remove this fake "has access to all data" group
*/
solrInputDocument.addField(SearchFields.PERMS, groupPrefix + tmpNsaGroupId);
addDatasetReleaseDateToSolrDoc(solrInputDocument, dataset);
if (dataset.getLatestVersion() != null) {
DatasetVersionUI datasetVersionUI = null;
try {
datasetVersionUI = new DatasetVersionUI(dataset.getLatestVersion());
} catch (NullPointerException ex) {
logger.info("Caught exception trying to instantiate DatasetVersionUI for dataset " + dataset.getId() + ". : " + ex);
}
if (datasetVersionUI != null) {
String citation = null;
try {
citation = datasetVersionUI.getCitation();
if (citation != null) {
solrInputDocument.addField(SearchFields.CITATION, citation);
}
} catch (NullPointerException ex) {
logger.info("Caught exception trying to get citation for dataset " + dataset.getId() + ". : " + ex);
}
}
for (DatasetField dsf : dataset.getLatestVersion().getFlatDatasetFields()) {
DatasetFieldType dsfType = dsf.getDatasetFieldType();
String solrFieldSearchable = dsfType.getSolrField().getNameSearchable();
String solrFieldFacetable = dsfType.getSolrField().getNameFacetable();
if (dsf.getValues() != null && !dsf.getValues().isEmpty() && dsf.getValues().get(0) != null && solrFieldSearchable != null) {
logger.info("indexing " + dsf.getDatasetFieldType().getName() + ":" + dsf.getValues() + " into " + solrFieldSearchable + " and maybe " + solrFieldFacetable);
if (dsfType.getSolrField().getSolrType().equals(SolrField.SolrType.INTEGER)) {
String dateAsString = dsf.getValues().get(0);
logger.info("date as string: " + dateAsString);
if (dateAsString != null && !dateAsString.isEmpty()) {
SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH);
try {
/**
* @todo when bean validation is working we
* won't have to convert strings into dates
*/
logger.info("Trying to convert " + dateAsString + " to a YYYY date from dataset " + dataset.getId());
Date dateAsDate = inputDateyyyy.parse(dateAsString);
SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy");
String datasetFieldFlaggedAsDate = yearOnly.format(dateAsDate);
logger.info("YYYY only: " + datasetFieldFlaggedAsDate);
solrInputDocument.addField(solrFieldSearchable, Integer.parseInt(datasetFieldFlaggedAsDate));
if (dsfType.getSolrField().isFacetable()) {
solrInputDocument.addField(solrFieldFacetable, Integer.parseInt(datasetFieldFlaggedAsDate));
}
} catch (Exception ex) {
logger.info("unable to convert " + dateAsString + " into YYYY format and couldn't index it (" + dsfType.getName() + ")");
}
}
} else {
// _s (dynamic string) and all other Solr fields
if (dsf.getDatasetFieldType().getName().equals("authorAffiliation")) {
/**
* @todo think about how to tie the fact that this
* needs to be multivalued (_ss) because a
* multivalued facet (authorAffilition_ss) is being
* collapsed into here at index time. The business
* logic to determine if a data-driven metadata
* field should be indexed into Solr as a single or
* multiple value lives in the getSolrField() method
* of DatasetField.java
*/
solrInputDocument.addField(SearchFields.AFFILIATION, dsf.getValues());
} else if (dsf.getDatasetFieldType().getName().equals("title")) {
// datasets have titles not names but index title under name as well so we can sort datasets by name along dataverses and files
solrInputDocument.addField(SearchFields.NAME_SORT, dsf.getValues());
}
if (dsfType.isControlledVocabulary()) {
for (ControlledVocabularyValue controlledVocabularyValue : dsf.getControlledVocabularyValues()) {
solrInputDocument.addField(solrFieldSearchable, controlledVocabularyValue.getStrValue());
if (dsfType.getSolrField().isFacetable()) {
solrInputDocument.addField(solrFieldFacetable, controlledVocabularyValue.getStrValue());
}
}
} else {
solrInputDocument.addField(solrFieldSearchable, dsf.getValues());
if (dsfType.getSolrField().isFacetable()) {
solrInputDocument.addField(solrFieldFacetable, dsf.getValues());
}
}
}
}
/**
* @todo: review all code below... commented out old indexing of
* hard coded fields. Also, should we respect the
* isAdvancedSearchField boolean?
*/
// if (datasetField.isAdvancedSearchField()) {
// advancedSearchFields.add(idDashName);
// logger.info(idDashName + " is an advanced search field (" + title + ")");
// if (name.equals(DatasetFieldConstant.title)) {
// String toIndexTitle = datasetFieldValue.getStrValue();
// if (toIndexTitle != null && !toIndexTitle.isEmpty()) {
// solrInputDocument.addField(SearchFields.TITLE, toIndexTitle);
// }
// } else if (name.equals(DatasetFieldConstant.authorName)) {
// String toIndexAuthor = datasetFieldValue.getStrValue();
// if (toIndexAuthor != null && !toIndexAuthor.isEmpty()) {
// logger.info("index this author: " + toIndexAuthor);
// solrInputDocument.addField(SearchFields.AUTHOR_STRING, toIndexAuthor);
// }
// } else if (name.equals(DatasetFieldConstant.productionDate)) {
// String toIndexProductionDateString = datasetFieldValue.getStrValue();
// logger.info("production date: " + toIndexProductionDateString);
// if (toIndexProductionDateString != null && !toIndexProductionDateString.isEmpty()) {
// SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH);
// try {
// logger.info("Trying to convert " + toIndexProductionDateString + " to a YYYY date from dataset " + dataset.getId());
// Date productionDate = inputDateyyyy.parse(toIndexProductionDateString);
// SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy");
// String productionYear = yearOnly.format(productionDate);
// logger.info("YYYY only: " + productionYear);
// solrInputDocument.addField(SearchFields.PRODUCTION_DATE_YEAR_ONLY, Integer.parseInt(productionYear));
// solrInputDocument.addField(SearchFields.PRODUCTION_DATE_ORIGINAL, productionDate);
// } catch (Exception ex) {
// logger.info("unable to convert " + toIndexProductionDateString + " into YYYY format");
// }
// }
// /**
// * @todo: DRY! this is the same as above!
// */
// } else if (name.equals(DatasetFieldConstant.distributionDate)) {
// String toIndexdistributionDateString = datasetFieldValue.getStrValue();
// logger.info("distribution date: " + toIndexdistributionDateString);
// if (toIndexdistributionDateString != null && !toIndexdistributionDateString.isEmpty()) {
// SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH);
// try {
// logger.info("Trying to convert " + toIndexdistributionDateString + " to a YYYY date from dataset " + dataset.getId());
// Date distributionDate = inputDateyyyy.parse(toIndexdistributionDateString);
// SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy");
// String distributionYear = yearOnly.format(distributionDate);
// logger.info("YYYY only: " + distributionYear);
// solrInputDocument.addField(SearchFields.DISTRIBUTION_DATE_YEAR_ONLY, Integer.parseInt(distributionYear));
// solrInputDocument.addField(SearchFields.DISTRIBUTION_DATE_ORIGINAL, distributionDate);
// } catch (Exception ex) {
// logger.info("unable to convert " + toIndexdistributionDateString + " into YYYY format");
// }
// }
// } else if (name.equals(DatasetFieldConstant.keywordValue)) {
// String toIndexKeyword = datasetFieldValue.getStrValue();
// if (toIndexKeyword != null && !toIndexKeyword.isEmpty()) {
// solrInputDocument.addField(SearchFields.KEYWORD, toIndexKeyword);
// }
// } else if (name.equals(DatasetFieldConstant.distributorName)) {
// String toIndexDistributor = datasetFieldValue.getStrValue();
// if (toIndexDistributor != null && !toIndexDistributor.isEmpty()) {
// solrInputDocument.addField(SearchFields.DISTRIBUTOR, toIndexDistributor);
// }
// } else if (name.equals(DatasetFieldConstant.description)) {
// String toIndexDescription = datasetFieldValue.getStrValue();
// if (toIndexDescription != null && !toIndexDescription.isEmpty()) {
// solrInputDocument.addField(SearchFields.DESCRIPTION, toIndexDescription);
// }
// }
// } else {
// notAdvancedSearchFields.add(idDashName);
// logger.info(idDashName + " is not an advanced search field (" + title + ")");
// }
}
}
solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
// solrInputDocument.addField(SearchFields.HOST_DATAVERSE, dataset.getOwner().getName());
solrInputDocument.addField(SearchFields.PARENT_ID, dataset.getOwner().getId());
solrInputDocument.addField(SearchFields.PARENT_NAME, dataset.getOwner().getName());
docs.add(solrInputDocument);
List<DataFile> files = dataset.getFiles();
for (DataFile dataFile : files) {
SolrInputDocument datafileSolrInputDocument = new SolrInputDocument();
datafileSolrInputDocument.addField(SearchFields.ID, "datafile_" + dataFile.getId());
datafileSolrInputDocument.addField(SearchFields.ENTITY_ID, dataFile.getId());
datafileSolrInputDocument.addField(SearchFields.TYPE, "files");
datafileSolrInputDocument.addField(SearchFields.NAME, dataFile.getName());
datafileSolrInputDocument.addField(SearchFields.NAME_SORT, dataFile.getName());
if (dataset.isReleased()) {
/**
* @todo: are datafiles supposed to have release dates? It's
* null. For now just set something: https://redmine.hmdc.harvard.edu/issues/3806
*/
// datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getReleaseDate());
datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate());
datafileSolrInputDocument.addField(SearchFields.PERMS, publicGroupString);
} else if (dataset.getOwner().getCreator() != null) {
/**
* todo why is dataFile.getCreateDate() null? For now I guess
* we'll use the createDate of its parent dataset's dataverse?! https://redmine.hmdc.harvard.edu/issues/3806
*/
// datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getCreateDate());
datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate());
datafileSolrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + dataset.getOwner().getCreator().getId());
/**
* @todo: replace this fake version of granting users access to
* dataverses with the real thing, when it's available in the
* app
*/
if (dataset.getOwner().getCreator().getUserName().equals("pete")) {
// figure out if cathy is around
DataverseUser cathy = dataverseUserServiceBean.findByUserName("cathy");
if (cathy != null) {
// let cathy see all of pete's dataverses
datafileSolrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + cathy.getId());
}
}
} else {
/**
* @todo: remove this once everyone has dropped their database
* and won't get NPE's from dataverse.getCreator
*/
/**
* todo why is dataFile.getCreateDate() null? For now I guess
* we'll use the createDate of its parent dataset's dataverse?! https://redmine.hmdc.harvard.edu/issues/3806
*/
// datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getCreateDate());
datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate());
datafileSolrInputDocument.addField(SearchFields.PERMS, npeGetCreator);
}
/**
* @todo: remove this fake "has access to all data" group
*/
datafileSolrInputDocument.addField(SearchFields.PERMS, groupPrefix + tmpNsaGroupId);
// For the mime type, we are going to index the "friendly" version, e.g.,
// "PDF File" instead of "application/pdf", "MS Excel" instead of
// "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" (!), etc.,
// if available:
datafileSolrInputDocument.addField(SearchFields.FILE_TYPE_MIME, dataFile.getFriendlyType());
// For the file type facets, we have a property file that maps mime types
// to facet-friendly names; "application/fits" should become "FITS", etc.:
datafileSolrInputDocument.addField(SearchFields.FILE_TYPE, FileUtil.getFacetFileType(dataFile));
datafileSolrInputDocument.addField(SearchFields.DESCRIPTION, dataFile.getDescription());
datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
// datafileSolrInputDocument.addField(SearchFields.HOST_DATAVERSE, dataFile.getOwner().getOwner().getName());
// datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getDataset().getTitle());
datafileSolrInputDocument.addField(SearchFields.PARENT_ID, dataFile.getOwner().getId());
if (!dataFile.getOwner().getLatestVersion().getTitle().isEmpty()) {
datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getOwner().getLatestVersion().getTitle());
}
// If this is a tabular data file -- i.e., if there are data
// variables associated with this file, we index the variable
// names and labels:
if (dataFile.isTabularData()) {
List<DataVariable> variables = dataFile.getDataTable().getDataVariables();
String variableNamesToIndex = null;
String variableLabelsToIndex = null;
for (DataVariable var : variables) {
// Hard-coded search fields, for now:
// TODO: immediately: define these as constants in SearchFields;
// TODO: eventually: review, decide how datavariables should
// be handled for indexing purposes. (should it be a fixed
// setup, defined in the code? should it be flexible? unlikely
// that this needs to be domain-specific... since these data
// variables are quite specific to tabular data, which in turn
// is something social science-specific...
// anyway -- needs to be reviewed. -- L.A. 4.0alpha1
if (var.getName() != null && !var.getName().equals("")) {
if (variableNamesToIndex == null) {
variableNamesToIndex = var.getName();
} else {
variableNamesToIndex = variableNamesToIndex + " " + var.getName();
}
}
if (var.getLabel() != null && !var.getLabel().equals("")) {
if (variableLabelsToIndex == null) {
variableLabelsToIndex = var.getLabel();
} else {
variableLabelsToIndex = variableLabelsToIndex + " " + var.getLabel();
}
}
}
if (variableNamesToIndex != null) {
logger.info("indexing " + variableNamesToIndex.length() + " bytes");
datafileSolrInputDocument.addField("varname_s", variableNamesToIndex);
}
if (variableLabelsToIndex != null) {
logger.info("indexing " + variableLabelsToIndex.length() + " bytes");
datafileSolrInputDocument.addField("varlabel_s", variableLabelsToIndex);
}
}
// And if the file has indexable file-level metadata associated
// with it, we'll index that too:
List<FileMetadataFieldValue> fileMetadataFieldValues = dataFile.getFileMetadataFieldValues();
if (fileMetadataFieldValues != null && fileMetadataFieldValues.size() > 0) {
for (int j = 0; j < fileMetadataFieldValues.size(); j++) {
String fieldValue = fileMetadataFieldValues.get(j).getStrValue();
FileMetadataField fmf = fileMetadataFieldValues.get(j).getFileMetadataField();
String fileMetadataFieldName = fmf.getName();
String fileMetadataFieldFormatName = fmf.getFileFormatName();
String fieldName = fileMetadataFieldFormatName + "-" + fileMetadataFieldName + "_s";
datafileSolrInputDocument.addField(fieldName, fieldValue);
}
}
docs.add(datafileSolrInputDocument);
}
/**
* @todo allow for configuration of hostname and port
*/
SolrServer server = new HttpSolrServer("http://localhost:8983/solr/");
try {
server.add(docs);
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
try {
server.commit();
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
return "indexed dataset " + dataset.getId(); // + ":" + dataset.getTitle();
}
#location 7
#vulnerability type NULL_DEREFERENCE
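The NULL_DEREFERENCE flagged at location 7 is the findPathSegments(dataset.getOwner(), ...) call, which runs before anything has confirmed that the dataset or its owner is non-null. Below is a minimal, self-contained sketch of the guard-clause idiom that removes this class of fault; the Dataset and Dataverse classes are illustrative stand-ins, not the real entities:

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

public class GuardClauseSketch {

    private static final Logger logger = Logger.getLogger(GuardClauseSketch.class.getName());

    // Hypothetical stand-ins for the entities in the patch above.
    static class Dataverse { }

    static class Dataset {
        Dataverse owner;
        Dataverse getOwner() { return owner; }
    }

    // Fail fast with a log message instead of dereferencing a possibly-null chain.
    static List<String> pathSegmentsFor(Dataset dataset) {
        List<String> segments = new ArrayList<>();
        if (dataset == null || dataset.getOwner() == null) {
            logger.warning("dataset or its owner is null; returning empty path segments");
            return segments; // an empty result keeps the caller's loop safe
        }
        // ... the real traversal of the owner hierarchy would go here ...
        return segments;
    }

    public static void main(String[] args) {
        System.out.println(pathSegmentsFor(new Dataset())); // owner is null -> []
    }
}

Returning an empty list rather than null means the caller's subsequent iteration needs no extra branching.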
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public synchronized String CalculateMD5 (String datafile) {
FileInputStream fis = null;
try {
fis = new FileInputStream(datafile);
} catch (FileNotFoundException ex) {
throw new RuntimeException(ex);
}
return CalculateMD5(fis);
/*
byte[] dataBytes = new byte[1024];
int nread;
try {
while ((nread = fis.read(dataBytes)) != -1) {
md.update(dataBytes, 0, nread);
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
byte[] mdbytes = md.digest();
StringBuilder sb = new StringBuilder("");
for (int i = 0; i < mdbytes.length; i++) {
sb.append(Integer.toString((mdbytes[i] & 0xff) + 0x100, 16).substring(1));
}
return sb.toString();
*/
}
|
#vulnerable code
public synchronized String CalculateMD5 (String datafile) {
MessageDigest md = null;
try {
md = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
FileInputStream fis = null;
try {
fis = new FileInputStream(datafile);
} catch (FileNotFoundException ex) {
throw new RuntimeException(ex);
}
byte[] dataBytes = new byte[1024];
int nread;
try {
while ((nread = fis.read(dataBytes)) != -1) {
md.update(dataBytes, 0, nread);
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
byte[] mdbytes = md.digest();
StringBuilder sb = new StringBuilder("");
for (int i = 0; i < mdbytes.length; i++) {
sb.append(Integer.toString((mdbytes[i] & 0xff) + 0x100, 16).substring(1));
}
return sb.toString();
}
#location 22
#vulnerability type RESOURCE_LEAK
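The RESOURCE_LEAK is the FileInputStream that the vulnerable version opens and never closes: the digest loop can exit normally or by exception, and neither path has a finally block. The patch above routes the stream into a shared overload instead; an equivalent stand-alone shape, assuming only the JDK, uses try-with-resources so the stream is closed on every exit path:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Md5Sketch {

    // Same digest loop as the patch, but the stream is closed on every exit path.
    public static String md5Of(String path) throws IOException, NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("MD5");
        try (InputStream in = new FileInputStream(path)) { // auto-closed, even on exception
            byte[] buf = new byte[1024];
            int nread;
            while ((nread = in.read(buf)) != -1) {
                md.update(buf, 0, nread);
            }
        }
        StringBuilder sb = new StringBuilder();
        for (byte b : md.digest()) {
            sb.append(Integer.toString((b & 0xff) + 0x100, 16).substring(1));
        }
        return sb.toString();
    }
}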
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void handleDropBoxUpload(ActionEvent e) {
// Read JSON object from the output of the DropBox Chooser:
JsonReader dbJsonReader = Json.createReader(new StringReader(dropBoxSelection));
JsonArray dbArray = dbJsonReader.readArray();
dbJsonReader.close();
for (int i = 0; i < dbArray.size(); i++) {
JsonObject dbObject = dbArray.getJsonObject(i);
// Extract the payload:
String fileLink = dbObject.getString("link");
String fileName = dbObject.getString("name");
int fileSize = dbObject.getInt("bytes");
logger.info("DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize);
DataFile dFile = null;
// Make http call, download the file:
GetMethod dropBoxMethod = new GetMethod(fileLink);
int status = 0;
InputStream dropBoxStream = null;
try {
status = getClient().executeMethod(dropBoxMethod);
if (status == 200) {
dropBoxStream = dropBoxMethod.getResponseBodyAsStream();
// If we've made it this far, we must have been able to
// make a successful HTTP call to the DropBox server and
// obtain an InputStream - so we can now create a new
// DataFile object:
dFile = ingestService.createDataFile(editVersion, dropBoxStream, fileName, null);
newFiles.add(dFile);
}
} catch (IOException ex) {
logger.warning("Failed to access DropBox url: " + fileLink + "!");
continue;
} finally {
if (dropBoxMethod != null) {
dropBoxMethod.releaseConnection();
}
if (dropBoxStream != null) {
try {
dropBoxStream.close();
} catch (Exception ex) {
//logger.whocares("...");
}
}
}
}
}
|
#vulnerable code
public void handleDropBoxUpload(ActionEvent e) {
// Read JSON object from the output of the DropBox Chooser:
JsonReader dbJsonReader = Json.createReader(new StringReader(dropBoxSelection));
JsonArray dbArray = dbJsonReader.readArray();
dbJsonReader.close();
for (int i = 0; i < dbArray.size(); i++) {
JsonObject dbObject = dbArray.getJsonObject(i);
// Extract the payload:
String fileLink = dbObject.getString("link");
String fileName = dbObject.getString("name");
int fileSize = dbObject.getInt("bytes");
logger.info("DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize);
DataFile dFile = null;
// Make http call, download the file:
GetMethod dropBoxMethod = new GetMethod(fileLink);
int status = 0;
InputStream dropBoxStream = null;
try {
status = getClient().executeMethod(dropBoxMethod);
if (status == 200) {
dropBoxStream = dropBoxMethod.getResponseBodyAsStream();
dFile = new DataFile("application/octet-stream");
dFile.setOwner(dataset);
// save the file, in the temporary location for now:
datasetService.generateFileSystemName(dFile);
if (ingestService.getFilesTempDirectory() != null) {
logger.info("Will attempt to save the DropBox file as: " + ingestService.getFilesTempDirectory() + "/" + dFile.getFileSystemName());
Files.copy(dropBoxStream, Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING);
File tempFile = Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()).toFile();
if (tempFile.exists()) {
long writtenBytes = tempFile.length();
logger.info("File size, expected: " + fileSize + ", written: " + writtenBytes);
} else {
throw new IOException();
}
}
}
} catch (IOException ex) {
logger.warning("Failed to access DropBox url: " + fileLink + "!");
continue;
} finally {
if (dropBoxMethod != null) {
dropBoxMethod.releaseConnection();
}
if (dropBoxStream != null) {
try {
dropBoxStream.close();
} catch (Exception ex) {
}
}
}
// If we've made it this far, we must have downloaded the file
// successfully, so let's finish processing it as a new DataFile
// object:
FileMetadata fmd = new FileMetadata();
fmd.setDataFile(dFile);
dFile.getFileMetadatas().add(fmd);
fmd.setLabel(fileName);
fmd.setCategory(dFile.getContentType());
if (editVersion.getFileMetadatas() == null) {
editVersion.setFileMetadatas(new ArrayList());
}
editVersion.getFileMetadatas().add(fmd);
fmd.setDatasetVersion(editVersion);
dataset.getFiles().add(dFile);
// When uploading files from dropBox, we don't get the benefit of
// having the browser recognize the mime type of the file. So we'll
// have to rely on our own utilities (Jhove, etc.) to try and determine
// what it is.
String fileType = null;
try {
fileType = FileUtil.determineFileType(Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()).toFile(), fileName);
logger.fine("File utility recognized the file as " + fileType);
if (fileType != null && !fileType.equals("")) {
dFile.setContentType(fileType);
}
} catch (IOException ex) {
logger.warning("Failed to run the file utility mime type check on file " + fileName);
}
newFiles.add(dFile);
}
}
#location 64
#vulnerability type NULL_DEREFERENCE
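In the vulnerable handleDropBoxUpload, a download that fails without throwing (any HTTP status other than 200) leaves dFile null, yet control falls through to fmd.setDataFile(dFile) and dFile.getFileMetadatas(): that is the null dereference. The fix keeps every step that needs a live stream inside the status == 200 branch. A sketch of that control-flow shape, with hypothetical stand-ins for GetMethod and the ingest service:

import java.io.IOException;
import java.io.InputStream;

public class DownloadSketch {

    // Hypothetical stand-ins; the real patch uses commons-httpclient's GetMethod
    // and an ingest service.
    interface Http {
        int execute() throws IOException;
        InputStream body() throws IOException;
        void release();
    }

    static class DataFile { }

    static DataFile createDataFile(InputStream in, String name) { return new DataFile(); }

    // Everything that needs a live stream stays inside the success branch,
    // so a failed call can never fall through to dereference a null file.
    static DataFile fetch(Http http, String name) {
        InputStream body = null;
        try {
            if (http.execute() == 200) {
                body = http.body();
                return createDataFile(body, name); // only reachable on success
            }
            return null; // caller must handle "no file was produced"
        } catch (IOException ex) {
            return null;
        } finally {
            http.release();
            if (body != null) {
                try { body.close(); } catch (IOException ignore) { }
            }
        }
    }
}

The caller is then forced to handle the "no file was produced" case explicitly instead of stumbling into it.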
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String getCitation(boolean isOnlineVersion, DatasetVersion version) {
return version.getCitation(isOnlineVersion);
}
|
#vulnerable code
public String getCitation(boolean isOnlineVersion, DatasetVersion version) {
String str = "";
boolean includeAffiliation = false;
String authors = version.getAuthorsStr(includeAffiliation);
if (!StringUtil.isEmpty(authors)) {
str += authors;
}
if (this.getPublicationDate() == null || StringUtil.isEmpty(this.getPublicationDate().toString())) {
//if not released use current year
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(new Timestamp(new Date().getTime())) ;
} else {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(new Timestamp(this.getPublicationDate().getTime()));
}
if ( version.getTitle() != null ) {
if (!StringUtil.isEmpty(version.getTitle())) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += "\"" + version.getTitle() + "\"";
}
}
if (!StringUtil.isEmpty(this.getIdentifier())) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
if (isOnlineVersion) {
str += "<a href=\"" + this.getPersistentURL() + "\">" + this.getIdentifier() + "</a>";
} else {
str += this.getPersistentURL();
}
}
//Get root dataverse name for Citation
Dataverse root = this.getOwner();
while (root.getOwner() != null) {
root = root.getOwner();
}
String rootDataverseName = root.getName();
if (!StringUtil.isEmpty(rootDataverseName)) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += " " + rootDataverseName + " ";
}
if (version.getVersionNumber() != null) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += " V" + version.getVersionNumber();
str += " [Version]";
}
/*UNF is not calculated yet
if (!StringUtil.isEmpty(getUNF())) {
if (!StringUtil.isEmpty(str)) {
str += " ";
}
str += getUNF();
}
String distributorNames = getDistributorNames();
if (distributorNames.trim().length() > 0) {
str += " " + distributorNames;
str += " [Distributor]";
}*/
return str;
}
#location 46
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public DepositReceipt getEntry(String uri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
DataverseUser dataverseUser = swordAuth.auth(authCredentials);
logger.fine("getEntry called with url: " + uri);
urlManager.processUrl(uri);
String targetType = urlManager.getTargetType();
if (!targetType.isEmpty()) {
logger.fine("operating on target type: " + urlManager.getTargetType());
if ("study".equals(targetType)) {
String globalId = urlManager.getTargetIdentifier();
Dataset dataset = null;
try {
dataset = datasetService.findByGlobalId(globalId);
} catch (EJBException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri);
}
if (dataset != null) {
Dataverse dvThatOwnsStudy = dataset.getOwner();
if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dvThatOwnsStudy)) {
ReceiptGenerator receiptGenerator = new ReceiptGenerator();
String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, dataset);
if (depositReceipt != null) {
return depositReceipt;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not generate deposit receipt.");
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " is not authorized to retrieve entry for " + dataset.getGlobalId());
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported target type (" + targetType + ") in URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type from URL: " + uri);
}
}
|
#vulnerable code
@Override
public DepositReceipt getEntry(String uri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
DataverseUser dataverseUser = swordAuth.auth(authCredentials);
logger.fine("getEntry called with url: " + uri);
urlManager.processUrl(uri);
String targetType = urlManager.getTargetType();
if (!targetType.isEmpty()) {
logger.fine("operating on target type: " + urlManager.getTargetType());
if ("study".equals(targetType)) {
String globalId = urlManager.getTargetIdentifier();
Dataset dataset = null;
try {
/**
* @todo don't hard code this, obviously. In DVN 3.x we had
* a method for studyService.getStudyByGlobalId(globalId)
*/
// study = studyService.getStudyByGlobalId(globalId);
long databaseIdForRoastingAtHomeDataset = 10;
dataset = datasetService.find(databaseIdForRoastingAtHomeDataset);
} catch (EJBException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri);
}
if (dataset != null) {
Dataverse dvThatOwnsStudy = dataset.getOwner();
if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dvThatOwnsStudy)) {
ReceiptGenerator receiptGenerator = new ReceiptGenerator();
String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, dataset);
if (depositReceipt != null) {
return depositReceipt;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not generate deposit receipt.");
}
} else {
/**
* @todo need study.getGlobalId() from DVN 3.x
*/
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " is not authorized to retrieve entry for " + dataset.getIdentifier());
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported target type (" + targetType + ") in URL: " + uri);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type from URL: " + uri);
}
}
#location 25
#vulnerability type NULL_DEREFERENCE
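The vulnerable getEntry resolves the dataset from a hardcoded test id (databaseIdForRoastingAtHomeDataset = 10) rather than the global id in the URL, so the result can be null, or simply the wrong study, before the later dereferences run. The shape of the fix, resolving by the caller-supplied identifier and converting a missing record into an explicit error, is sketched below with an illustrative registry and error type (not the Dataverse or SWORD API):

import java.util.HashMap;
import java.util.Map;

public class LookupSketch {

    static class NotFound extends Exception {
        NotFound(String msg) { super(msg); }
    }

    static class Dataset { }

    // Toy registry standing in for datasetService.findByGlobalId(...).
    static final Map<String, Dataset> registry = new HashMap<>();

    // Resolve by the identifier the caller actually asked for,
    // and turn "not found" into an explicit error before any dereference.
    static Dataset require(String globalId) throws NotFound {
        Dataset d = registry.get(globalId);
        if (d == null) {
            throw new NotFound("Could not find study based on global id (" + globalId + ")");
        }
        return d;
    }

    public static void main(String[] args) throws NotFound {
        registry.put("doi:10.5072/FK2/5555", new Dataset());
        System.out.println(require("doi:10.5072/FK2/5555")); // prints the found instance
    }
}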
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String save() {
dataset.setOwner(dataverseService.find(ownerId));
//TODO get real application-wide protocol/authority
dataset.setProtocol("doi");
dataset.setAuthority("10.5072/FK2");
dataset.setIdentifier("5555");
//TODO add replication for logic if necessary
if (replicationFor){
//dataset.getVersions().get(0).getDatasetFields().
}
//TODO pre-populate the deposit date
//If this is a new dataset, set the create date and the creator user
if (dataset.getId() == null){
dataset.setCreator(session.getUser());
dataset.setCreateDate(new Timestamp(new Date().getTime()));
}
if (!(dataset.getVersions().get(0).getFileMetadatas() == null) && !dataset.getVersions().get(0).getFileMetadatas().isEmpty()) {
int fmdIndex = 0;
for (FileMetadata fmd : dataset.getVersions().get(0).getFileMetadatas()) {
for (FileMetadata fmdTest : editVersion.getFileMetadatas()) {
if (fmd.equals(fmdTest)) {
dataset.getVersions().get(0).getFileMetadatas().get(fmdIndex).setDataFile(fmdTest.getDataFile());
}
}
fmdIndex++;
}
}
/*
* Save and/or ingest files, if there are any:
*/
if (newFiles != null && newFiles.size() > 0) {
try {
if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) {
/* Note that "createDirectories()" must be used - not
* "createDirectory()", to make sure all the parent
* directories that may not yet exist are created as well.
*/
Files.createDirectories(dataset.getFileSystemDirectory());
}
} catch (IOException dirEx) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString());
}
if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
for (UploadedFile uFile : newFiles.keySet()) {
DataFile dFile = newFiles.get(uFile);
String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName();
boolean ingestedAsTabular = false;
boolean metadataExtracted = false;
datasetService.generateFileSystemName(dFile);
if (ingestService.ingestableAsTabular(dFile)) {
/*
try {
ingestedAsTabular = ingestService.ingestAsTabular(tempFileLocation, dFile);
dFile.setContentType("text/tab-separated-values");
} catch (IOException iex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, iex);
ingestedAsTabular = false;
}
*/
dFile.SetIngestScheduled();
} else if (ingestService.fileMetadataExtractable(dFile)) {
try {
dFile.setContentType("application/fits");
metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion);
} catch (IOException mex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + dFile.getName(), mex);
}
if (metadataExtracted) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + dFile.getName());
} else {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + dFile.getName());
}
}
/* Try to save the file in its permanent location:
* (unless it was already ingested and saved as tabular data)
*/
if (!ingestedAsTabular) {
try {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString());
Files.copy(uFile.getInputstream(), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING);
MD5Checksum md5Checksum = new MD5Checksum();
try {
dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString()));
} catch (Exception md5ex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + dFile.getName());
}
} catch (IOException ioex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation());
}
}
// Any necessary post-processing:
ingestService.performPostProcessingTasks(dFile);
}
}
}
try {
dataset = datasetService.save(dataset);
} catch (EJBException ex) {
StringBuilder error = new StringBuilder();
error.append(ex + " ");
error.append(ex.getMessage() + " ");
Throwable cause = ex;
while (cause.getCause() != null) {
cause = cause.getCause();
error.append(cause + " ");
error.append(cause.getMessage() + " ");
}
logger.info("Couldn't save dataset: " + error.toString());
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString()));
return null;
}
newFiles.clear();
editMode = null;
// Queue the ingest jobs for asynchronous execution:
for (DataFile dataFile : dataset.getFiles()) {
if (dataFile.isIngestScheduled()) {
dataFile.SetIngestInProgress();
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getName() + " for ingest.");
ingestService.asyncIngestAsTabular(dataFile);
}
}
return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true";
}
|
#vulnerable code
public String save() {
dataset.setOwner(dataverseService.find(ownerId));
//TODO get real application-wide protocol/authority
dataset.setProtocol("doi");
dataset.setAuthority("10.5072/FK2");
dataset.setIdentifier("5555");
//TODO add replication for logic if necessary
if (replicationFor){
//dataset.getVersions().get(0).getDatasetFields().
}
//TODO pre-populate the deposit date
//If this is a new dataset, set the create date and the creator user
if (dataset.getId() == null){
dataset.setCreator(session.getUser());
dataset.setCreateDate(new Timestamp(new Date().getTime()));
}
if (!(dataset.getVersions().get(0).getFileMetadatas() == null) && !dataset.getVersions().get(0).getFileMetadatas().isEmpty()) {
int fmdIndex = 0;
for (FileMetadata fmd : dataset.getVersions().get(0).getFileMetadatas()) {
for (FileMetadata fmdTest : editVersion.getFileMetadatas()) {
if (fmd.equals(fmdTest)) {
dataset.getVersions().get(0).getFileMetadatas().get(fmdIndex).setDataFile(fmdTest.getDataFile());
}
}
fmdIndex++;
}
}
/*
* Save and/or ingest files, if there are any:
*/
if (newFiles != null && newFiles.size() > 0) {
try {
if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) {
/* Note that "createDirectories()" must be used - not
* "createDirectory()", to make sure all the parent
* directories that may not yet exist are created as well.
*/
Files.createDirectories(dataset.getFileSystemDirectory());
}
} catch (IOException dirEx) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString());
}
if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
for (UploadedFile uFile : newFiles.keySet()) {
DataFile dFile = newFiles.get(uFile);
String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName();
boolean ingestedAsTabular = false;
boolean metadataExtracted = false;
datasetService.generateFileSystemName(dFile);
if (ingestService.ingestableAsTabular(dFile)) {
try {
ingestedAsTabular = ingestService.ingestAsTabular(tempFileLocation, dFile);
dFile.setContentType("text/tab-separated-values");
} catch (IOException iex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, iex);
ingestedAsTabular = false;
}
} else if (ingestService.fileMetadataExtractable(dFile)) {
try {
dFile.setContentType("application/fits");
metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion);
} catch (IOException mex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + dFile.getName(), mex);
}
if (metadataExtracted) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + dFile.getName());
} else {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + dFile.getName());
}
}
/* Try to save the file in its permanent location:
* (unless it was already ingested and saved as tabular data)
*/
if (!ingestedAsTabular) {
try {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString());
Files.copy(uFile.getInputstream(), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING);
MD5Checksum md5Checksum = new MD5Checksum();
try {
dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString()));
} catch (Exception md5ex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + dFile.getName());
}
} catch (IOException ioex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation());
}
}
// Any necessary post-processing:
ingestService.performPostProcessingTasks(dFile);
}
}
}
try {
dataset = datasetService.save(dataset);
} catch (EJBException ex) {
StringBuilder error = new StringBuilder();
error.append(ex + " ");
error.append(ex.getMessage() + " ");
Throwable cause = ex;
while (cause.getCause() != null) {
cause = cause.getCause();
error.append(cause + " ");
error.append(cause.getMessage() + " ");
}
logger.info("Couldn't save dataset: " + error.toString());
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString()));
return null;
}
newFiles.clear();
editMode = null;
return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true";
}
#location 87
#vulnerability type NULL_DEREFERENCE
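Besides removing the null dereference, the fixed save() changes the ingest model: the synchronous ingestAsTabular call is replaced by marking each file as scheduled during save and queueing asynchronous ingest only after the dataset has been persisted. A minimal sketch of that two-phase state pattern follows; the enum and the queue hand-off are illustrative stand-ins, not the actual IngestServiceBean API:

import java.util.ArrayList;
import java.util.List;

public class DeferredIngestSketch {

    enum IngestStatus { NONE, SCHEDULED, IN_PROGRESS }

    static class DataFile {
        IngestStatus status = IngestStatus.NONE;
        boolean isIngestScheduled() { return status == IngestStatus.SCHEDULED; }
        void setIngestScheduled() { status = IngestStatus.SCHEDULED; }
        void setIngestInProgress() { status = IngestStatus.IN_PROGRESS; }
    }

    // Phase 1: while saving, only mark the file; no heavy work yet.
    static void prepare(DataFile f, boolean ingestable) {
        if (ingestable) {
            f.setIngestScheduled();
        }
    }

    // Phase 2: after the dataset is persisted, hand scheduled files to a worker.
    static void queueScheduled(List<DataFile> files) {
        for (DataFile f : files) {
            if (f.isIngestScheduled()) {
                f.setIngestInProgress();
                // asyncIngest(f); // hypothetical asynchronous hand-off
            }
        }
    }

    public static void main(String[] args) {
        List<DataFile> files = new ArrayList<>();
        DataFile f = new DataFile();
        prepare(f, true);
        files.add(f);
        queueScheduled(files);
        System.out.println(f.status); // IN_PROGRESS
    }
}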
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Path("dsPreview/{datasetId}")
@GET
@Produces({ "image/png" })
public InputStream dsPreview(@PathParam("datasetId") Long datasetId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
Dataset dataset = datasetService.find(datasetId);
if (dataset == null) {
logger.warning("Preview: dataset service could not locate a Dataset object for id "+datasetId+"!");
return null;
}
String imageThumbFileName = null;
List<DataFile> dataFiles = dataset.getFiles();
for (DataFile dataFile : dataFiles) {
if (dataFile.isImage()) {
imageThumbFileName = ImageThumbConverter.generateImageThumb(dataFile.getFileSystemLocation().toString(), 48);
break;
}
}
if (imageThumbFileName == null) {
imageThumbFileName = getWebappImageResource (DEFAULT_DATASET_ICON);
}
if (imageThumbFileName != null) {
InputStream in;
try {
in = new FileInputStream(imageThumbFileName);
} catch (Exception ex) {
// We don't particularly care what the reason why we have
// failed to access the file was.
// From the point of view of the download subsystem, it's a
// binary operation -- it's either successfull or not.
// If we can't access it for whatever reason, we are saying
// it's 404 NOT FOUND in our HTTP response.
return null;
}
return in;
}
return null;
}
|
#vulnerable code
@Path("dsPreview/{datasetId}")
@GET
@Produces({ "image/png" })
public InputStream dsPreview(@PathParam("datasetId") Long datasetId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
Dataset dataset = datasetService.find(datasetId);
String imageThumbFileName = null;
List<DataFile> dataFiles = dataset.getFiles();
for (DataFile dataFile : dataFiles) {
if (dataFile.isImage()) {
imageThumbFileName = ImageThumbConverter.generateImageThumb(dataFile.getFileSystemLocation().toString(), 48);
break;
}
}
if (imageThumbFileName == null) {
imageThumbFileName = getWebappImageResource (DEFAULT_DATASET_ICON);
}
if (imageThumbFileName != null) {
InputStream in;
try {
in = new FileInputStream(imageThumbFileName);
} catch (Exception ex) {
// We don't particularly care what the reason why we have
// failed to access the file was.
// From the point of view of the download subsystem, it's a
// binary operation -- it's either successfull or not.
// If we can't access it for whatever reason, we are saying
// it's 404 NOT FOUND in our HTTP response.
return null;
}
return in;
}
return null;
}
#location 14
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Statement getStatement(String editUri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
this.swordConfiguration = (SwordConfigurationImpl) swordConfiguration;
swordConfiguration = (SwordConfigurationImpl) swordConfiguration;
if (authCredentials == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "auth credentials are null");
}
if (swordAuth == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "swordAuth is null");
}
DataverseUser vdcUser = swordAuth.auth(authCredentials);
urlManager.processUrl(editUri);
String globalId = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("study") && globalId != null) {
logger.fine("request for sword statement by user " + vdcUser.getUserName());
Dataset study = datasetService.findByGlobalId(globalId);
// try {
// study = studyService.getStudyByGlobalId(globalId);
// } catch (EJBException ex) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + editUri);
// }
Long studyId;
try {
studyId = study.getId();
} catch (NullPointerException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId);
}
Dataverse dvThatOwnsStudy = study.getOwner();
if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) {
String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getGlobalId();
/**
* @todo is it safe to use this?
*/
String author = study.getLatestVersion().getAuthorsStr();
String title = study.getLatestVersion().getTitle();
Date lastUpdated = study.getLatestVersion().getLastUpdateTime();
if (lastUpdated == null) {
/**
* @todo why is this date null?
*/
logger.info("why is lastUpdated null?");
lastUpdated = new Date();
}
AtomDate atomDate = new AtomDate(lastUpdated);
// AtomDate atomDate = new AtomDate(study.getLatestVersion().getLastUpdateTime());
String datedUpdated = atomDate.toString();
Statement statement = new AtomStatement(feedUri, author, title, datedUpdated);
Map<String, String> states = new HashMap<String, String>();
states.put("latestVersionState", study.getLatestVersion().getVersionState().toString());
/**
* @todo DVN 3.x had a studyLock. What's the equivalent in 4.0?
*/
// StudyLock lock = study.getStudyLock();
// if (lock != null) {
// states.put("locked", "true");
// states.put("lockedDetail", lock.getDetail());
// states.put("lockedStartTime", lock.getStartTime().toString());
// } else {
// states.put("locked", "false");
// }
statement.setStates(states);
List<FileMetadata> fileMetadatas = study.getLatestVersion().getFileMetadatas();
for (FileMetadata fileMetadata : fileMetadatas) {
DataFile studyFile = fileMetadata.getDataFile();
// We are exposing the filename for informational purposes. The file id is what you
// actually operate on to delete a file, etc.
//
// Replace spaces to avoid IRISyntaxException
String fileNameFinal = fileMetadata.getLabel().replace(' ', '_');
String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + studyFile.getId() + "/" + fileNameFinal;
IRI studyFileUrl;
try {
studyFileUrl = new IRI(studyFileUrlString);
} catch (IRISyntaxException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + studyFileUrlString + " ) resulted in " + ex.getMessage());
}
ResourcePart resourcePart = new ResourcePart(studyFileUrl.toString());
/**
* @todo get this working. show the actual file type
*/
// resourcePart.setMediaType(studyFile.getOriginalFileFormat());
resourcePart.setMediaType("application/octet-stream");
/**
* @todo: Why are properties set on a ResourcePart not
* exposed when you GET a Statement?
*/
// Map<String, String> properties = new HashMap<String, String>();
// properties.put("filename", studyFile.getFileName());
// properties.put("category", studyFile.getLatestCategory());
// properties.put("originalFileType", studyFile.getOriginalFileType());
// properties.put("id", studyFile.getId().toString());
// properties.put("UNF", studyFile.getUnf());
// resourcePart.setProperties(properties);
statement.addResource(resourcePart);
}
return statement;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to view study with global ID " + globalId);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri);
}
}
|
#vulnerable code
@Override
public Statement getStatement(String editUri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
this.swordConfiguration = (SwordConfigurationImpl) swordConfiguration;
swordConfiguration = (SwordConfigurationImpl) swordConfiguration;
if (authCredentials == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "auth credentials are null");
}
if (swordAuth == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "swordAuth is null");
}
DataverseUser vdcUser = swordAuth.auth(authCredentials);
urlManager.processUrl(editUri);
String globalId = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("study") && globalId != null) {
logger.fine("request for sword statement by user " + vdcUser.getUserName());
// Study study = null;
/**
* @todo don't hard code this, obviously. In DVN 3.x we had a method
* for editStudyService.getStudyByGlobalId(globalId)
*/
// Study study = editStudyService.getStudyByGlobalId(globalId);
long databaseIdForRoastingAtHomeDataset = 10;
Dataset study = datasetService.find(databaseIdForRoastingAtHomeDataset);
// try {
// study = studyService.getStudyByGlobalId(globalId);
// } catch (EJBException ex) {
// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + editUri);
// }
Long studyId;
try {
studyId = study.getId();
} catch (NullPointerException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId);
}
Dataverse dvThatOwnsStudy = study.getOwner();
if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) {
/**
* @todo getIdentifier is equivalent to getGlobalId, right?
*/
// String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getGlobalId();
String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getIdentifier();
/**
* @todo is it safe to use this?
*/
String author = study.getLatestVersion().getAuthorsStr();
String title = study.getLatestVersion().getTitle();
Date lastUpdated = study.getLatestVersion().getLastUpdateTime();
if (lastUpdated == null) {
/**
* @todo why is this date null?
*/
logger.info("why is lastUpdated null?");
lastUpdated = new Date();
}
AtomDate atomDate = new AtomDate(lastUpdated);
// AtomDate atomDate = new AtomDate(study.getLatestVersion().getLastUpdateTime());
String datedUpdated = atomDate.toString();
Statement statement = new AtomStatement(feedUri, author, title, datedUpdated);
Map<String, String> states = new HashMap<String, String>();
states.put("latestVersionState", study.getLatestVersion().getVersionState().toString());
/**
* @todo DVN 3.x had a studyLock. What's the equivalent in 4.0?
*/
// StudyLock lock = study.getStudyLock();
// if (lock != null) {
// states.put("locked", "true");
// states.put("lockedDetail", lock.getDetail());
// states.put("lockedStartTime", lock.getStartTime().toString());
// } else {
// states.put("locked", "false");
// }
statement.setStates(states);
List<FileMetadata> fileMetadatas = study.getLatestVersion().getFileMetadatas();
for (FileMetadata fileMetadata : fileMetadatas) {
DataFile studyFile = fileMetadata.getDataFile();
// We are exposing the filename for informational purposes. The file id is what you
// actually operate on to delete a file, etc.
//
// Replace spaces to avoid IRISyntaxException
String fileNameFinal = fileMetadata.getLabel().replace(' ', '_');
String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + studyFile.getId() + "/" + fileNameFinal;
IRI studyFileUrl;
try {
studyFileUrl = new IRI(studyFileUrlString);
} catch (IRISyntaxException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + studyFileUrlString + " ) resulted in " + ex.getMessage());
}
ResourcePart resourcePart = new ResourcePart(studyFileUrl.toString());
/**
* @todo get this working. show the actual file type
*/
// resourcePart.setMediaType(studyFile.getOriginalFileFormat());
resourcePart.setMediaType("application/octet-stream");
/**
* @todo: Why are properties set on a ResourcePart not
* exposed when you GET a Statement?
*/
// Map<String, String> properties = new HashMap<String, String>();
// properties.put("filename", studyFile.getFileName());
// properties.put("category", studyFile.getLatestCategory());
// properties.put("originalFileType", studyFile.getOriginalFileType());
// properties.put("id", studyFile.getId().toString());
// properties.put("UNF", studyFile.getUnf());
// resourcePart.setProperties(properties);
statement.addResource(resourcePart);
}
return statement;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to view study with global ID " + globalId);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri);
}
}
#location 40
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public String save() {
dataset.setOwner(dataverseService.find(ownerId));
//TODO get real application-wide protocol/authority
dataset.setProtocol("doi");
dataset.setAuthority("10.5072/FK2");
dataset.setIdentifier("5555");
/*
* Save and/or ingest files, if there are any:
*/
if (newFiles != null && newFiles.size() > 0) {
try {
if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) {
/* Note that "createDirectories()" must be used - not
* "createDirectory()", to make sure all the parent
* directories that may not yet exist are created as well.
*/
Files.createDirectories(dataset.getFileSystemDirectory());
}
} catch (IOException dirEx) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString());
}
if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
for (DataFile dFile : newFiles) {
String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName();
// These are all brand new files, so they should all have
// one filemetadata total. You do NOT want to use
// getLatestFilemetadata() here - because it relies on
// comparing the object IDs of the corresponding datasetversions...
// Which may not have been persisted yet.
// -- L.A. 4.0 beta.
FileMetadata fileMetadata = dFile.getFileMetadatas().get(0);
String fileName = fileMetadata.getLabel();
//boolean ingestedAsTabular = false;
boolean metadataExtracted = false;
datasetService.generateFileSystemName(dFile);
if (ingestService.ingestableAsTabular(dFile)) {
/*
* Note that we don't try to ingest the file right away -
* instead we mark it as "scheduled for ingest", then at
* the end of the save process it will be queued for async.
* ingest in the background. In the meantime, the file
* will be ingested as a regular, non-tabular file, and
* appear as such to the user, until the ingest job is
* finished with the Ingest Service.
*/
dFile.SetIngestScheduled();
} else if (ingestService.fileMetadataExtractable(dFile)) {
try {
dFile.setContentType("application/fits");
metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion);
} catch (IOException mex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex);
}
if (metadataExtracted) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName);
} else {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName);
}
}
// Try to save the file in its permanent location:
//if (!ingestedAsTabular) {
try {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString());
Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING);
MD5Checksum md5Checksum = new MD5Checksum();
try {
dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString()));
} catch (Exception md5ex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName);
}
} catch (IOException ioex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation());
}
//}
// Any necessary post-processing:
ingestService.performPostProcessingTasks(dFile);
}
}
}
Command<Dataset> cmd;
try {
if (editMode == EditMode.CREATE) {
cmd = new CreateDatasetCommand(dataset, session.getUser());
} else {
cmd = new UpdateDatasetCommand(dataset, session.getUser());
}
dataset = commandEngine.submit(cmd);
} catch (EJBException ex) {
StringBuilder error = new StringBuilder();
error.append(ex + " ");
error.append(ex.getMessage() + " ");
Throwable cause = ex;
while (cause.getCause() != null) {
cause = cause.getCause();
error.append(cause + " ");
error.append(cause.getMessage() + " ");
}
logger.info("Couldn't save dataset: " + error.toString());
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString()));
return null;
} catch (CommandException ex) {
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex);
}
newFiles.clear();
editMode = null;
// Queue the ingest jobs for asynchronous execution:
for (DataFile dataFile : dataset.getFiles()) {
if (dataFile.isIngestScheduled()) {
dataFile.SetIngestInProgress();
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest.");
ingestService.asyncIngestAsTabular(dataFile);
}
}
return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true";
}
|
#vulnerable code
public String save() {
dataset.setOwner(dataverseService.find(ownerId));
//TODO get real application-wide protocol/authority
dataset.setProtocol("doi");
dataset.setAuthority("10.5072/FK2");
dataset.setIdentifier("5555");
//TODO update title in page itself
if (replicationFor) {
updateTitle();
}
/*
* Save and/or ingest files, if there are any:
*/
if (newFiles != null && newFiles.size() > 0) {
try {
if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) {
/* Note that "createDirectories()" must be used - not
* "createDirectory()", to make sure all the parent
* directories that may not yet exist are created as well.
*/
Files.createDirectories(dataset.getFileSystemDirectory());
}
} catch (IOException dirEx) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString());
}
if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
for (DataFile dFile : newFiles) {
String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName();
// These are all brand new files, so they should all have
// one filemetadata total. You do NOT want to use
// getLatestFilemetadata() here - because it relies on
// comparing the object IDs of the corresponding datasetversions...
// Which may not have been persisted yet.
// -- L.A. 4.0 beta.
FileMetadata fileMetadata = dFile.getFileMetadatas().get(0);
String fileName = fileMetadata.getLabel();
//boolean ingestedAsTabular = false;
boolean metadataExtracted = false;
datasetService.generateFileSystemName(dFile);
if (ingestService.ingestableAsTabular(dFile)) {
/*
* Note that we don't try to ingest the file right away -
* instead we mark it as "scheduled for ingest", then at
* the end of the save process it will be queued for async.
* ingest in the background. In the meantime, the file
* will be ingested as a regular, non-tabular file, and
* appear as such to the user, until the ingest job is
* finished with the Ingest Service.
*/
dFile.SetIngestScheduled();
} else if (ingestService.fileMetadataExtractable(dFile)) {
try {
dFile.setContentType("application/fits");
metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion);
} catch (IOException mex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex);
}
if (metadataExtracted) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName);
} else {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName);
}
}
// Try to save the file in its permanent location:
//if (!ingestedAsTabular) {
try {
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString());
Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING);
MD5Checksum md5Checksum = new MD5Checksum();
try {
dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString()));
} catch (Exception md5ex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName);
}
} catch (IOException ioex) {
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation());
}
//}
// Any necessary post-processing:
ingestService.performPostProcessingTasks(dFile);
}
}
}
Command<Dataset> cmd;
try {
if (editMode == EditMode.CREATE) {
cmd = new CreateDatasetCommand(dataset, session.getUser());
} else {
cmd = new UpdateDatasetCommand(dataset, session.getUser());
}
dataset = commandEngine.submit(cmd);
} catch (EJBException ex) {
StringBuilder error = new StringBuilder();
error.append(ex + " ");
error.append(ex.getMessage() + " ");
Throwable cause = ex;
while (cause.getCause() != null) {
cause = cause.getCause();
error.append(cause + " ");
error.append(cause.getMessage() + " ");
}
logger.info("Couldn't save dataset: " + error.toString());
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString()));
return null;
} catch (CommandException ex) {
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex);
}
newFiles.clear();
editMode = null;
// Queue the ingest jobs for asynchronous execution:
for (DataFile dataFile : dataset.getFiles()) {
if (dataFile.isIngestScheduled()) {
dataFile.SetIngestInProgress();
Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest.");
ingestService.asyncIngestAsTabular(dataFile);
}
}
return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true";
}
#location 130
#vulnerability type NULL_DEREFERENCE
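#note
The method already models the null case in one place — `dataset.getFileSystemDirectory() != null && Files.exists(...)` — while other uses of the same getter (for example the failure-logging path in the catch block) dereference it unguarded. A minimal standard-library sketch of resolving the nullable value once and guarding every use of it (the names here are illustrative, not the project's):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class DirectoryGuardSketch {
    static void ensureDirectory(Path dir) throws IOException {
        // Resolve the nullable value into a local once, then guard every use of it.
        if (dir != null && !Files.exists(dir)) {
            Files.createDirectories(dir); // also creates any missing parents
        }
    }
}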
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private DatasetVersion createNewDatasetVersion() {
DatasetVersion dsv = new DatasetVersion();
dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
DatasetVersion latestVersion = getLatestVersion();
//if the latest version has values get them copied over
if (latestVersion.getDatasetFields() != null && !latestVersion.getDatasetFields().isEmpty()) {
dsv.setDatasetFields(dsv.copyDatasetFields(latestVersion.getDatasetFields()));
}
dsv.setFileMetadatas(new ArrayList());
for (FileMetadata fm : latestVersion.getFileMetadatas()) {
FileMetadata newFm = new FileMetadata();
newFm.setCategory(fm.getCategory());
newFm.setDescription(fm.getDescription());
newFm.setLabel(fm.getLabel());
newFm.setDataFile(fm.getDataFile());
newFm.setDatasetVersion(dsv);
dsv.getFileMetadatas().add(newFm);
}
// I'm adding the version to the list so it will be persisted when
// the study object is persisted.
getVersions().add(0, dsv);
dsv.setDataset(this);
return dsv;
}
|
#vulnerable code
private DatasetVersion createNewDatasetVersion() {
DatasetVersion dsv = new DatasetVersion();
dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
DatasetVersion latestVersion = getLatestVersion();
//if the latest version has values get them copied over
if (latestVersion.getDatasetFields() != null && !latestVersion.getDatasetFields().isEmpty()) {
dsv.setDatasetFields(dsv.copyDatasetFields(latestVersion.getDatasetFields()));
}
dsv.setFileMetadatas(new ArrayList());
for (FileMetadata fm : latestVersion.getFileMetadatas()) {
FileMetadata newFm = new FileMetadata();
newFm.setCategory(fm.getCategory());
newFm.setDescription(fm.getDescription());
newFm.setLabel(fm.getLabel());
newFm.setDataFile(fm.getDataFile());
newFm.setDatasetVersion(dsv);
dsv.getFileMetadatas().add(newFm);
}
dsv.setVersionNumber(latestVersion.getVersionNumber() + 1);
// I'm adding the version to the list so it will be persisted when
// the study object is persisted.
getVersions().add(0, dsv);
dsv.setDataset(this);
return dsv;
}
#location 12
#vulnerability type NULL_DEREFERENCE
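#note
The patch removes `dsv.setVersionNumber(latestVersion.getVersionNumber() + 1)`: when `getVersionNumber()` returns a null `Long`, the `+ 1` auto-unboxes it and throws a NullPointerException even though no explicit dereference appears in the source. A self-contained illustration of that failure mode and one defensive alternative:
class UnboxingSketch {
    static long next(Long current) {
        // 'current + 1' would unbox 'current' and NPE when it is null;
        // make the null case explicit instead.
        return (current == null) ? 1L : current + 1;
    }
}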
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public DataModel getDatasetFieldsDataModel() {
List values = new ArrayList();
int i = 0;
for (DatasetFieldValue dsfv : editVersion.getDatasetFieldValues()){
DatasetField datasetField = dsfv.getDatasetField();
Object[] row = new Object[4];
row[0] = datasetField;
row[1] = getValuesDataModel(dsfv);
row[2] = new Integer(i);
row[3] = datasetField;
values.add(row);
i++;
}
return new ListDataModel(values);
}
|
#vulnerable code
public DataModel getDatasetFieldsDataModel() {
List values = new ArrayList();
int i = 0;
for (DatasetFieldValue dsfv : dataset.getEditVersion().getDatasetFieldValues()){
DatasetField datasetField = dsfv.getDatasetField();
Object[] row = new Object[4];
row[0] = datasetField;
row[1] = getValuesDataModel(datasetField);
row[2] = new Integer(i);
row[3] = datasetField;
values.add(row);
i++;
}
return new ListDataModel(values);
}
#location 4
#vulnerability type NULL_DEREFERENCE
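#note
The fixed version iterates a pre-resolved `editVersion` field (and passes the `DatasetFieldValue` itself to `getValuesDataModel`) instead of calling `dataset.getEditVersion()` inline, where a null return would fail at the chained `.getDatasetFieldValues()` call. A generic sketch of the resolve-once-and-guard shape, using only the standard library:
import java.util.Collections;
import java.util.List;
import java.util.Map;

class ResolveOnceSketch {
    static List<String> valuesOf(Map<String, List<String>> byVersion, String editKey) {
        List<String> edit = byVersion.get(editKey); // may be absent
        return (edit == null) ? Collections.emptyList() : List.copyOf(edit);
    }
}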
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Set<SystemProperty> systemProperties = new HashSet<>();
systemProperties.add(
SystemProperty.createSystemProperty("loop", "for.prop1", "for_value")
);
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, systemProperties).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Map<String, Serializable> expectedInputs = new HashMap<>();
expectedInputs.put("text", 1);
expectedInputs.put("sp_arg", "for_value");
Assert.assertEquals(expectedInputs, firstTask.getInputs());
expectedInputs.put("text", 2);
Assert.assertEquals(expectedInputs, secondTask.getInputs());
expectedInputs.put("text", 3);
Assert.assertEquals(expectedInputs, thirdTask.getInputs());
}
|
#vulnerable code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Assert.assertTrue(firstTask.getInputs().containsValue(1));
Assert.assertTrue(secondTask.getInputs().containsValue(2));
Assert.assertTrue(thirdTask.getInputs().containsValue(3));
}
#location 14
#vulnerability type NULL_DEREFERENCE
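#note
`stepsData.get(FIRST_STEP_PATH)` and friends return null for an absent key, so any of the subsequent `getInputs()` calls can raise the flagged NullPointerException when the flow did not record the expected step. The same lookup shape recurs in the other flow tests below. A small helper that fails with a readable message instead (a sketch, not the suite's own utility):
import java.util.Map;
import java.util.NoSuchElementException;

class StepLookupSketch {
    static <V> V requireStep(Map<String, V> steps, String key) {
        V step = steps.get(key); // Map.get returns null for an absent key
        if (step == null) {
            throw new NoSuchElementException("no step recorded under: " + key);
        }
        return step;
    }
}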
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidStatementAndTrim() throws Exception {
ForLoopStatement statement = transformer.transform(" min in collection ");
ListForLoopStatement listForLoopStatement = validateListForLoopStatement(statement);
Assert.assertEquals("min", listForLoopStatement.getVarName());
Assert.assertEquals("collection", listForLoopStatement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidStatementAndTrim() throws Exception {
ForLoopStatement statement = transformer.transform(" min in collection ");
Assert.assertEquals(ForLoopStatement.Type.LIST, statement.getType());
Assert.assertEquals("min", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
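#note
`transformer.transform(...)` may return null (or an unexpected subtype), so asserting on `statement.getType()` directly is the flagged dereference; the fixed tests funnel the result through a `validateListForLoopStatement`/`validateMapForLoopStatement` helper first. The same change repeats across the transformer tests below. A hedged sketch of what such a helper typically does — the suite's actual implementation is not shown here:
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

class ValidateSketch {
    static <T> T validateAs(Object value, Class<T> expected) {
        assertNotNull("transformer returned null", value);
        assertTrue("unexpected type: " + value.getClass(), expected.isInstance(value));
        return expected.cast(value);
    }
}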
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidMapStatement() throws Exception {
ForLoopStatement statement = transformer.transform("k, v in collection");
MapForLoopStatement mapForLoopStatement = validateMapForLoopStatement(statement);
Assert.assertEquals("k", mapForLoopStatement.getKeyName());
Assert.assertEquals("v", mapForLoopStatement.getValueName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidMapStatement() throws Exception {
ForLoopStatement statement = transformer.transform("k, v in collection");
Assert.assertEquals(ForLoopStatement.Type.MAP, statement.getType());
Assert.assertEquals("k v", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidStatement() throws Exception {
ForLoopStatement statement = transformer.transform("x in collection");
ListForLoopStatement listForLoopStatement = validateListForLoopStatement(statement);
Assert.assertEquals("x", listForLoopStatement.getVarName());
Assert.assertEquals("collection", listForLoopStatement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidStatement() throws Exception {
ForLoopStatement statement = transformer.transform("x in collection");
Assert.assertEquals(ForLoopStatement.Type.LIST, statement.getType());
Assert.assertEquals("x", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) {
String repositoryPath = System.getProperty("path");
String testsPath = System.getProperty("testPath");
String testSuitsArg = System.getProperty("testSuits");
Validate.notNull(repositoryPath, "You must pass a path to your repository");
repositoryPath = FilenameUtils.separatorsToSystem(repositoryPath);
Validate.isTrue(new File(repositoryPath).isDirectory(),
"Directory path argument \'" + repositoryPath + "\' does not lead to a directory");
String[] testSuits = null;
if(testSuitsArg != null){
testSuits = testSuitsArg.split(",");
}
ApplicationContext context = new ClassPathXmlApplicationContext("/META-INF/spring/testRunnerContext.xml");
SlangBuild slangBuild = context.getBean(SlangBuild.class);
try {
int numberOfValidSlangFiles = slangBuild.buildSlangContent(repositoryPath, testsPath, testSuits);
System.out.println("SUCCESS: Found " + numberOfValidSlangFiles + " slang files under directory: \"" + repositoryPath + "\" and all are valid.");
System.exit(0);
} catch (Exception e) {
System.out.println(e.getMessage() + "\n\nFAILURE: Validation of slang files under directory: \"" + repositoryPath + "\" failed.");
// TODO - do we want to throw exception or exit with 1?
System.exit(1);
}
}
|
#vulnerable code
public static void main(String[] args) {
String repositoryPath = System.getProperty("path");
String testsPath = System.getProperty("testPath");
String testSuitsArg = System.getProperty("testSuits");
Validate.notNull(repositoryPath, "You must pass a path to your repository");
repositoryPath = FilenameUtils.separatorsToSystem(repositoryPath);
Validate.isTrue(new File(repositoryPath).isDirectory(),
"Directory path argument \'" + repositoryPath + "\' does not lead to a directory");
String[] testSuits = null;
if(testSuitsArg != null){
testSuits = testSuitsArg.split(",");
}
ApplicationContext context = new AnnotationConfigApplicationContext(SlangBuildSpringConfiguration.class);
SlangBuild slangBuild = context.getBean(SlangBuild.class);
try {
int numberOfValidSlangFiles = slangBuild.buildSlangContent(repositoryPath, testsPath, testSuits);
System.out.println("SUCCESS: Found " + numberOfValidSlangFiles + " slang files under directory: \"" + repositoryPath + "\" and all are valid.");
System.exit(0);
} catch (Exception e) {
System.out.println(e.getMessage() + "\n\nFAILURE: Validation of slang files under directory: \"" + repositoryPath + "\" failed.");
// TODO - do we want to throw exception or exit with 1?
System.exit(1);
}
}
#location 15
#vulnerability type RESOURCE_LEAK
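#note
The leak label points at the Spring context: neither version closes it before `System.exit`. The shown fix swaps in a `ClassPathXmlApplicationContext`, but the more direct remedy — assuming Spring 4+, where `ConfigurableApplicationContext` implements `Closeable` — is try-with-resources. A sketch under that assumption (`SlangBuild` and `SlangBuildSpringConfiguration` are the entry's own classes; everything else is standard Spring):
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

class ContextLifecycleSketch {
    static void runBuild() {
        try (AnnotationConfigApplicationContext context =
                 new AnnotationConfigApplicationContext(SlangBuildSpringConfiguration.class)) {
            SlangBuild slangBuild = context.getBean(SlangBuild.class);
            // use the bean; the context closes even if this block throws
        }
    }
}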
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public List<Value> bindAsyncLoopList(
AsyncLoopStatement asyncLoopStatement,
Context flowContext,
Set<SystemProperty> systemProperties,
String nodeName) {
Validate.notNull(asyncLoopStatement, "async loop statement cannot be null");
Validate.notNull(flowContext, "flow context cannot be null");
Validate.notNull(systemProperties, "system properties cannot be null");
Validate.notNull(nodeName, "node name cannot be null");
List<Value> result = new ArrayList<>();
try {
Value evalResult = scriptEvaluator.evalExpr(asyncLoopStatement.getExpression(), flowContext.getImmutableViewOfVariables(), systemProperties);
if (evalResult != null && evalResult.get() != null) {
//noinspection unchecked
for (Serializable serializable : ((List<Serializable>)evalResult.get())) {
Value value = ValueFactory.create(serializable, evalResult.isSensitive());
result.add(value);
}
}
} catch (Throwable t) {
throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, t.getMessage()), t);
}
if (CollectionUtils.isEmpty(result)) {
throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, "expression is empty"));
}
return result;
}
|
#vulnerable code
public List<Value> bindAsyncLoopList(
AsyncLoopStatement asyncLoopStatement,
Context flowContext,
Set<SystemProperty> systemProperties,
String nodeName) {
Validate.notNull(asyncLoopStatement, "async loop statement cannot be null");
Validate.notNull(flowContext, "flow context cannot be null");
Validate.notNull(systemProperties, "system properties cannot be null");
Validate.notNull(nodeName, "node name cannot be null");
List<Value> result = new ArrayList<>();
try {
Value evalResult = scriptEvaluator.evalExpr(asyncLoopStatement.getExpression(), flowContext.getImmutableViewOfVariables(), systemProperties);
if (evalResult.get() != null) {
//noinspection unchecked
for (Serializable serializable : ((List<Serializable>)evalResult.get())) {
Value value = serializable instanceof Value ? (Value)serializable : ValueFactory.create(serializable, evalResult.isSensitive());
result.add(value);
}
}
} catch (Throwable t) {
throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, t.getMessage()), t);
}
if (CollectionUtils.isEmpty(result)) {
throw new RuntimeException(generateAsyncLoopExpressionMessage(nodeName, "expression is empty"));
}
return result;
}
#location 14
#vulnerability type NULL_DEREFERENCE
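#note
The fix adds `evalResult != null &&` in front of `evalResult.get() != null`: the evaluator itself may hand back null, and the inner payload check alone cannot protect the outer call. An equivalent guard expressed generically, with the check order made explicit:
import java.util.Optional;

class EvalGuardSketch {
    interface Eval { Object get(); }

    static void use(Eval evalResult) {
        // Check the wrapper before its payload.
        Optional.ofNullable(evalResult)
                .map(Eval::get)
                .ifPresent(payload -> System.out.println("payload: " + payload));
    }
}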
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testMultipleInsAreTrimmed() throws Exception {
ForLoopStatement statement = transformer.transform(" in in in ");
ListForLoopStatement listForLoopStatement = validateListForLoopStatement(statement);
Assert.assertEquals("in", listForLoopStatement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testMultipleInsAreTrimmed() throws Exception {
ForLoopStatement statement = transformer.transform(" in in in ");
Assert.assertEquals("in", statement.getVarName());
Assert.assertEquals("in", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidMapStatementAndTrimMultipleWhitSpaces() throws Exception {
ForLoopStatement statement = transformer.transform(" k, v in collection ");
MapForLoopStatement mapForLoopStatement = validateMapForLoopStatement(statement);
Assert.assertEquals("k", mapForLoopStatement.getKeyName());
Assert.assertEquals("v", mapForLoopStatement.getValueName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidMapStatementAndTrimMultipleWhitSpaces() throws Exception {
ForLoopStatement statement = transformer.transform(" k, v in collection ");
Assert.assertEquals(ForLoopStatement.Type.MAP, statement.getType());
Assert.assertEquals("k v", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void beginTask(@Param(ScoreLangConstants.TASK_INPUTS_KEY) List<Input> taskInputs,
@Param(ScoreLangConstants.LOOP_KEY) LoopStatement loop,
@Param(ScoreLangConstants.RUN_ENV) RunEnvironment runEnv,
@Param(EXECUTION_RUNTIME_SERVICES) ExecutionRuntimeServices executionRuntimeServices,
@Param(ScoreLangConstants.NODE_NAME_KEY) String nodeName,
@Param(ExecutionParametersConsts.RUNNING_EXECUTION_PLAN_ID) Long RUNNING_EXECUTION_PLAN_ID,
@Param(ScoreLangConstants.NEXT_STEP_ID_KEY) Long nextStepId,
@Param(ScoreLangConstants.REF_ID) String refId) {
try {
startStepExecutionPathCalc(runEnv);
runEnv.removeCallArguments();
runEnv.removeReturnValues();
Context flowContext = runEnv.getStack().popContext();
Map<String, Serializable> flowVariables = flowContext.getImmutableViewOfVariables();
fireEvent(executionRuntimeServices, runEnv, ScoreLangConstants.EVENT_INPUT_START, "Task inputs start Binding",
Pair.of(LanguageEventData.BOUND_INPUTS,(Serializable)retrieveInputs(taskInputs)),
Pair.of( LanguageEventData.levelName.TASK_NAME.name(), nodeName));
//loops
if (loopStatementExist(loop)) {
LoopCondition loopCondition = loopsBinding.getOrCreateLoopCondition(loop, flowContext, nodeName);
if (!loopCondition.hasMore()) {
runEnv.putNextStepPosition(nextStepId);
runEnv.getStack().pushContext(flowContext);
return;
}
if (loopCondition instanceof ForLoopCondition) {
ForLoopCondition forLoopCondition = (ForLoopCondition) loopCondition;
if (loop instanceof ListForLoopStatement) {
// normal iteration
String varName = ((ListForLoopStatement) loop).getVarName();
loopsBinding.incrementListForLoop(varName, flowContext, forLoopCondition);
} else {
// map iteration
MapForLoopStatement mapForLoopStatement = (MapForLoopStatement) loop;
String keyName = mapForLoopStatement.getKeyName();
String valueName = mapForLoopStatement.getValueName();
loopsBinding.incrementMapForLoop(keyName, valueName, flowContext, forLoopCondition);
}
}
}
// Map<String, Serializable> flowVariables = flowContext.getImmutableViewOfVariables();
Map<String, Serializable> operationArguments = inputsBinding.bindInputs(taskInputs, flowVariables, runEnv.getSystemProperties());
//todo: hook
sendBindingInputsEvent(taskInputs, operationArguments, runEnv, executionRuntimeServices, "Task inputs resolved",
nodeName, LanguageEventData.levelName.TASK_NAME);
updateCallArgumentsAndPushContextToStack(runEnv, flowContext, operationArguments);
// request the score engine to switch to the execution plan of the given ref
requestSwitchToRefExecutableExecutionPlan(runEnv, executionRuntimeServices, RUNNING_EXECUTION_PLAN_ID, refId, nextStepId);
// set the start step of the given ref as the next step to execute (in the new running execution plan that will be set)
runEnv.putNextStepPosition(executionRuntimeServices.getSubFlowBeginStep(refId));
// runEnv.getExecutionPath().down();
} catch (RuntimeException e) {
logger.error("There was an error running the begin task execution step of: \'" + nodeName + "\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: " + nodeName + ": " + e.getMessage(), e);
}
}
|
#vulnerable code
public void beginTask(@Param(ScoreLangConstants.TASK_INPUTS_KEY) List<Input> taskInputs,
@Param(ScoreLangConstants.LOOP_KEY) LoopStatement loop,
@Param(ScoreLangConstants.RUN_ENV) RunEnvironment runEnv,
@Param(EXECUTION_RUNTIME_SERVICES) ExecutionRuntimeServices executionRuntimeServices,
@Param(ScoreLangConstants.NODE_NAME_KEY) String nodeName,
@Param(ExecutionParametersConsts.RUNNING_EXECUTION_PLAN_ID) Long RUNNING_EXECUTION_PLAN_ID,
@Param(ScoreLangConstants.NEXT_STEP_ID_KEY) Long nextStepId,
@Param(ScoreLangConstants.REF_ID) String refId) {
try {
runEnv.getExecutionPath().forward();
runEnv.removeCallArguments();
runEnv.removeReturnValues();
Context flowContext = runEnv.getStack().popContext();
//loops
if (loopStatementExist(loop)) {
LoopCondition loopCondition = loopsBinding.getOrCreateLoopCondition(loop, flowContext, nodeName);
if (!loopCondition.hasMore()) {
runEnv.putNextStepPosition(nextStepId);
runEnv.getStack().pushContext(flowContext);
return;
}
if (loopCondition instanceof ForLoopCondition) {
ForLoopCondition forLoopCondition = (ForLoopCondition) loopCondition;
if (loop instanceof ListForLoopStatement) {
// normal iteration
String varName = ((ListForLoopStatement) loop).getVarName();
loopsBinding.incrementListForLoop(varName, flowContext, forLoopCondition);
} else {
// map iteration
MapForLoopStatement mapForLoopStatement = (MapForLoopStatement) loop;
String keyName = mapForLoopStatement.getKeyName();
String valueName = mapForLoopStatement.getValueName();
loopsBinding.incrementMapForLoop(keyName, valueName, flowContext, forLoopCondition);
}
}
}
Map<String, Serializable> flowVariables = flowContext.getImmutableViewOfVariables();
Map<String, Serializable> operationArguments = inputsBinding.bindInputs(taskInputs, flowVariables, runEnv.getSystemProperties());
//todo: hook
sendBindingInputsEvent(taskInputs, operationArguments, runEnv, executionRuntimeServices, "Task inputs resolved",
nodeName, LanguageEventData.levelName.TASK_NAME);
updateCallArgumentsAndPushContextToStack(runEnv, flowContext, operationArguments);
// request the score engine to switch to the execution plan of the given ref
requestSwitchToRefExecutableExecutionPlan(runEnv, executionRuntimeServices, RUNNING_EXECUTION_PLAN_ID, refId, nextStepId);
// set the start step of the given ref as the next step to execute (in the new running execution plan that will be set)
runEnv.putNextStepPosition(executionRuntimeServices.getSubFlowBeginStep(refId));
runEnv.getExecutionPath().down();
} catch (RuntimeException e) {
logger.error("There was an error running the begin task execution step of: \'" + nodeName + "\'. Error is: " + e.getMessage());
throw new RuntimeException("Error running: " + nodeName + ": " + e.getMessage(), e);
}
}
#location 18
#vulnerability type NULL_DEREFERENCE
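#note
Besides replacing the manual `forward()`/`down()` path bookkeeping with `startStepExecutionPathCalc`, the fixed version reads `flowContext.getImmutableViewOfVariables()` immediately after the pop, before the loop logic can branch away. If `popContext()` can return null on an empty stack (as `Deque.poll()` does), that null surfaces far from its origin; a generic fail-fast sketch:
import java.util.Deque;

class ContextPopSketch {
    static <C> C popOrFail(Deque<C> stack, String stepName) {
        C context = stack.poll(); // null, not an exception, when the stack is empty
        if (context == null) {
            throw new IllegalStateException("no context on the stack for step: " + stepName);
        }
        return context;
    }
}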
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public ReturnValues getExecutionReturnValues(){
// if(StringUtils.isEmpty(result)){
// throw new RuntimeException("Result of executing the test " + testCaseName + " cannot be empty");
// }
return new ReturnValues(outputs, result);
}
|
#vulnerable code
public ReturnValues getExecutionReturnValues(){
if(StringUtils.isEmpty(result)){
throw new RuntimeException("Result of executing the test " + testCaseName + " cannot be empty");
}
if (outputs == null){
outputs = new HashMap<>();
}
return new ReturnValues(outputs, result);
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
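#note
`result` and `outputs` appear to be filled in by an event-listener thread while the test thread polls and reads them without synchronization; the lazy `outputs = new HashMap<>()` on the read path makes the race a write-write one as well. The shown patch simply deletes the unsynchronized check and init. One way to make the hand-off safe, as a sketch rather than the project's fix:
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

class ReturnValuesHolder {
    private String result;
    private final Map<String, Serializable> outputs = new HashMap<>();

    synchronized void publish(String result, Map<String, Serializable> outputs) {
        this.result = result;
        this.outputs.putAll(outputs);
    }

    synchronized Map<String, Serializable> snapshotOutputs() {
        return new HashMap<>(outputs); // defensive copy under the same lock
    }

    synchronized String result() {
        return result;
    }
}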
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test(timeout = DEFAULT_TIMEOUT)
public void doJavaActionSetKeyOnNonSerializableSessionTest() {
//prepare doAction arguments
RunEnvironment runEnv = new RunEnvironment();
HashMap<String, Object> nonSerializableExecutionData = new HashMap<>();
GlobalSessionObject<ContentTestActions.NonSerializableObject> sessionObject = new GlobalSessionObject<>();
ContentTestActions.NonSerializableObject employee = new ContentTestActions.NonSerializableObject("John");
sessionObject.setResource(new ContentTestActions.NonSerializableSessionResource(employee));
nonSerializableExecutionData.put("name", sessionObject);
Map<String, Serializable> initialCallArguments = new HashMap<>();
initialCallArguments.put("value", "David");
runEnv.putCallArguments(initialCallArguments);
//invoke doAction
actionSteps.doAction(runEnv, nonSerializableExecutionData, JAVA, ContentTestActions.class.getName(),
"setNameOnNonSerializableSession", executionRuntimeServicesMock, null, 2L);
Assert.assertTrue(nonSerializableExecutionData.containsKey("name"));
@SuppressWarnings("unchecked")
GlobalSessionObject<ContentTestActions.NonSerializableObject> updatedSessionObject =
(GlobalSessionObject<ContentTestActions.NonSerializableObject>) nonSerializableExecutionData.get("name");
ContentTestActions.NonSerializableObject nonSerializableObject = updatedSessionObject.get();
String actualName = nonSerializableObject.getName();
Assert.assertEquals("David", actualName);
}
|
#vulnerable code
@Test(timeout = DEFAULT_TIMEOUT)
public void doJavaActionSetKeyOnNonSerializableSessionTest() {
//prepare doAction arguments
RunEnvironment runEnv = new RunEnvironment();
HashMap<String, Object> nonSerializableExecutionData = new HashMap<>();
GlobalSessionObject<ContentTestActions.NonSerializableObject> sessionObject = new GlobalSessionObject<>();
ContentTestActions.NonSerializableObject employee = new ContentTestActions.NonSerializableObject("John");
sessionObject.setResource(new ContentTestActions.NonSerializableSessionResource(employee));
nonSerializableExecutionData.put("name", sessionObject);
Map<String, Serializable> initialCallArguments = new HashMap<>();
initialCallArguments.put("value", "David");
runEnv.putCallArguments(initialCallArguments);
//invoke doAction
actionSteps.doAction(runEnv, nonSerializableExecutionData, JAVA, ContentTestActions.class.getName(),
"setNameOnNonSerializableSession", executionRuntimeServicesMock, null, 2L);
Map<String, Serializable> outputs = runEnv.removeReturnValues().getOutputs();
Assert.assertTrue(outputs.containsKey("name"));
Assert.assertEquals("David", outputs.get("name"));
}
#location 18
#vulnerability type NULL_DEREFERENCE
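#note
The fixed test stops going through `runEnv.removeReturnValues()` — which the flagged location suggests can return null when the action published nothing — and inspects the mutated session map directly. When a test must dereference such a value, asserting first turns the NPE into a readable failure:
import static org.junit.Assert.assertNotNull;

class FailFastSketch {
    static <T> T requireReturned(T value) {
        assertNotNull("execution produced no return values", value);
        return value;
    }
}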
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Set<SystemProperty> systemProperties = new HashSet<>();
systemProperties.add(
SystemProperty.createSystemProperty("loop", "for.prop1", "for_value")
);
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, systemProperties).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Map<String, Serializable> expectedInputs = new HashMap<>();
expectedInputs.put("text", 1);
expectedInputs.put("sp_arg", "for_value");
Assert.assertEquals(expectedInputs, firstTask.getInputs());
expectedInputs.put("text", 2);
Assert.assertEquals(expectedInputs, secondTask.getInputs());
expectedInputs.put("text", 3);
Assert.assertEquals(expectedInputs, thirdTask.getInputs());
}
|
#vulnerable code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Assert.assertTrue(firstTask.getInputs().containsValue(1));
Assert.assertTrue(secondTask.getInputs().containsValue(2));
Assert.assertTrue(thirdTask.getInputs().containsValue(3));
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidMapStatementWithoutSpaceAfterComma() throws Exception {
ForLoopStatement statement = transformer.transform("k,v in collection");
MapForLoopStatement mapForLoopStatement = validateMapForLoopStatement(statement);
Assert.assertEquals("k", mapForLoopStatement.getKeyName());
Assert.assertEquals("v", mapForLoopStatement.getValueName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidMapStatementWithoutSpaceAfterComma() throws Exception {
ForLoopStatement statement = transformer.transform("k,v in collection");
Assert.assertEquals(ForLoopStatement.Type.MAP, statement.getType());
Assert.assertEquals("k v", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
StepData thirdStep = stepsData.get(THIRD_STEP_KEY);
Assert.assertEquals("print_other_values", thirdStep.getName());
}
|
#vulnerable code
@Test
public void testFlowWithLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Assert.assertEquals("print_other_values", thirdTask.getName());
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Long trigger(SlangTestCase testCase, CompilationArtifact compilationArtifact,
Map<String, ? extends Serializable> inputs,
Map<String, ? extends Serializable> systemProperties) {
String testCaseName = testCase.getName();
String result = testCase.getResult();
//add start event
Set<String> handlerTypes = new HashSet<>();
handlerTypes.add(ScoreLangConstants.EVENT_EXECUTION_FINISHED);
handlerTypes.add(ScoreLangConstants.SLANG_EXECUTION_EXCEPTION);
handlerTypes.add(ScoreLangConstants.EVENT_OUTPUT_END);
handlerTypes.add(EventConstants.SCORE_ERROR_EVENT);
handlerTypes.add(EventConstants.SCORE_FAILURE_EVENT);
handlerTypes.add(EventConstants.SCORE_FINISHED_EVENT);
TriggerTestCaseEventListener testsEventListener = new TriggerTestCaseEventListener(testCaseName, result);
slang.subscribeOnEvents(testsEventListener, handlerTypes);
Long executionId = slang.run(compilationArtifact, inputs, systemProperties);
while (!testsEventListener.isFlowFinished()) {
try {
Thread.sleep(200);
} catch (InterruptedException ignore) {}
}
slang.unSubscribeOnEvents(testsEventListener);
ReturnValues executionReturnValues = testsEventListener.getExecutionReturnValues();
String executionResult = executionReturnValues.getResult();
String errorMessageFlowExecution = testsEventListener.getErrorMessage();
if (BooleanUtils.isTrue(testCase.getThrowsException())) {
if(StringUtils.isBlank(errorMessageFlowExecution)) {
throw new RuntimeException("Failed test: " + testCaseName + " - " + testCase.getDescription() + "\nFlow " + compilationArtifact.getExecutionPlan().getName() +" did not throw an exception as expected");
}
return executionId;
}
if(StringUtils.isNotBlank(errorMessageFlowExecution)){
// unexpected exception occurred during flow execution
throw new RuntimeException("Error occured while running test: " + testCaseName + " - " + testCase.getDescription() + "\n" + errorMessageFlowExecution);
}
if (result != null && !result.equals(executionResult)){
throw new RuntimeException("Failed test: " + testCaseName +" - " + testCase.getDescription() + "\nExpected result: " + result + "\nActual result: " + executionResult);
}
return executionId;
}
|
#vulnerable code
public Long trigger(SlangTestCase testCase, CompilationArtifact compilationArtifact,
Map<String, ? extends Serializable> inputs,
Map<String, ? extends Serializable> systemProperties) {
String testCaseName = testCase.getName();
String result = testCase.getResult();
//add start event
Set<String> handlerTypes = new HashSet<>();
handlerTypes.add(ScoreLangConstants.EVENT_EXECUTION_FINISHED);
handlerTypes.add(ScoreLangConstants.SLANG_EXECUTION_EXCEPTION);
handlerTypes.add(ScoreLangConstants.EVENT_OUTPUT_END);
handlerTypes.add(EventConstants.SCORE_ERROR_EVENT);
handlerTypes.add(EventConstants.SCORE_FAILURE_EVENT);
handlerTypes.add(EventConstants.SCORE_FINISHED_EVENT);
TriggerTestCaseEventListener testsEventListener = new TriggerTestCaseEventListener(testCaseName, result);
slang.subscribeOnEvents(testsEventListener, handlerTypes);
Long executionId = slang.run(compilationArtifact, inputs, systemProperties);
while (!testsEventListener.isFlowFinished()) {
try {
Thread.sleep(200);
} catch (InterruptedException ignore) {}
}
slang.unSubscribeOnEvents(testsEventListener);
ReturnValues executionReturnValues = testsEventListener.getExecutionReturnValues();
String executionResult = executionReturnValues.getResult();
String errorMessageFlowExecution = testsEventListener.getErrorMessage();
if(StringUtils.isBlank(errorMessageFlowExecution) && BooleanUtils.isTrue(testCase.getThrowsException())){
throw new RuntimeException("Failed test: " + testCaseName + " - " + testCase.getDescription() + "\nFlow " + compilationArtifact.getExecutionPlan().getName() +" did not throw an exception as expected");
}
if(StringUtils.isNotBlank(errorMessageFlowExecution) && BooleanUtils.isFalse(testCase.getThrowsException())){
// unexpected exception occurred during flow execution
throw new RuntimeException("Error occured while running test: " + testCaseName + " - " + testCase.getDescription() + "\n" + errorMessageFlowExecution);
}
if (result != null && !executionResult.equals(result)){
throw new RuntimeException("Failed test: " + testCaseName +" - " + testCase.getDescription() + "\nExpected result: " + result + "\nActual result: " + executionResult);
}
return executionId;
}
#location 41
#vulnerability type NULL_DEREFERENCE
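#note
Two null-safety moves in the fixed `trigger`: the expected-exception branch returns early, and the final comparison is flipped from `!executionResult.equals(result)` to `result != null && !result.equals(executionResult)`, so a null `executionResult` can no longer be the receiver. `java.util.Objects.equals` expresses the same thing without caring which side is null:
import java.util.Objects;

class NullSafeCompareSketch {
    static boolean mismatch(String expected, String actual) {
        // 'actual' may be null when the flow failed before producing a result
        return expected != null && !Objects.equals(expected, actual);
    }
}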
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithMapLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
StepData fourthStep = stepsData.get(FOURTH_STEP_KEY);
Assert.assertEquals("print_other_values", fourthStep.getName());
}
|
#vulnerable code
@Test
public void testFlowWithMapLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
StepData fourthTask = stepsData.get(FOURTH_STEP_KEY);
Assert.assertEquals("print_other_values", fourthTask.getName());
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidMapStatementAndTrim() throws Exception {
ForLoopStatement statement = transformer.transform(" k, v in collection ");
MapForLoopStatement mapForLoopStatement = validateMapForLoopStatement(statement);
Assert.assertEquals("k", mapForLoopStatement.getKeyName());
Assert.assertEquals("v", mapForLoopStatement.getValueName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidMapStatementAndTrim() throws Exception {
ForLoopStatement statement = transformer.transform(" k, v in collection ");
Assert.assertEquals(ForLoopStatement.Type.MAP, statement.getType());
Assert.assertEquals("k v", statement.getVarName());
Assert.assertEquals("collection", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithMapLoopsWithBreak() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_break_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/operation_that_goes_to_custom_when_value_is_2.sl").toURI();
URI operation2 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1), SlangSource.fromFile(operation2));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
List<String> actualSteps = getStepsOnly(stepsData);
Assert.assertEquals(2, actualSteps.size());
StepData secondStep = stepsData.get(SECOND_STEP_KEY);
Assert.assertEquals("print_other_values", secondStep.getName());
}
|
#vulnerable code
@Test
public void testFlowWithMapLoopsWithBreak() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_break_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/operation_that_goes_to_custom_when_value_is_2.sl").toURI();
URI operation2 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1), SlangSource.fromFile(operation2));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getSteps();
List<String> actualTasks = getTasksOnly(stepsData);
Assert.assertEquals(2, actualTasks.size());
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
Assert.assertEquals("print_other_values", secondTask.getName());
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testStepPublishValues() throws Exception {
URL resource = getClass().getResource("/yaml/binding_scope_flow.sl");
URI operation = getClass().getResource("/yaml/binding_scope_op.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource.toURI()), path);
Map<String, Value> userInputs = Collections.emptyMap();
Set<SystemProperty> systemProperties = Collections.emptySet();
// trigger ExecutionPlan
RuntimeInformation runtimeInformation = triggerWithData(compilationArtifact, userInputs, systemProperties);
Map<String, StepData> executionData = runtimeInformation.getSteps();
StepData stepData = executionData.get(FIRST_STEP_PATH);
Assert.assertNotNull("step data is null", stepData);
}
|
#vulnerable code
@Test
public void testStepPublishValues() throws Exception {
URL resource = getClass().getResource("/yaml/binding_scope_flow.sl");
URI operation = getClass().getResource("/yaml/binding_scope_op.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource.toURI()), path);
Map<String, Value> userInputs = Collections.emptyMap();
Set<SystemProperty> systemProperties = Collections.emptySet();
// trigger ExecutionPlan
RuntimeInformation runtimeInformation = triggerWithData(compilationArtifact, userInputs, systemProperties);
Map<String, StepData> executionData = runtimeInformation.getSteps();
StepData stepData = executionData.get(FIRST_STEP_PATH);
Assert.assertNotNull("step data is null", stepData);
verifyStepPublishValues(stepData);
}
#location 19
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValues() throws Exception {
// compile
URI resource = getClass().getResource("/yaml/formats/values_flow.sl").toURI();
URI operation1 = getClass().getResource("/yaml/formats/values_op.sl").toURI();
URI operation2 = getClass().getResource("/yaml/noop.sl").toURI();
SlangSource dep1 = SlangSource.fromFile(operation1);
SlangSource dep2 = SlangSource.fromFile(operation2);
Set<SlangSource> path = Sets.newHashSet(dep1, dep2);
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
// trigger
Map<String, StepData> steps = prepareAndRun(compilationArtifact);
// verify
StepData flowData = steps.get(EXEC_START_PATH);
StepData taskData = steps.get(FIRST_STEP_PATH);
verifyExecutableInputs(flowData);
verifyExecutableOutputs(flowData);
verifyTaskInputs(taskData);
verifyTaskPublishValues(taskData);
verifySuccessResult(flowData);
}
|
#vulnerable code
@Test
public void testValues() throws Exception {
// compile
URI resource = getClass().getResource("/yaml/formats/values_flow.sl").toURI();
URI operation1 = getClass().getResource("/yaml/formats/values_op.sl").toURI();
URI operation2 = getClass().getResource("/yaml/noop.sl").toURI();
SlangSource dep1 = SlangSource.fromFile(operation1);
SlangSource dep2 = SlangSource.fromFile(operation2);
Set<SlangSource> path = Sets.newHashSet(dep1, dep2);
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
// trigger
Map<String, StepData> steps = prepareAndRun(compilationArtifact);
// verify
StepData flowData = steps.get(EXEC_START_PATH);
StepData taskData = steps.get(FIRST_STEP_PATH);
StepData oneLinerTaskData = steps.get(SECOND_STEP_KEY);
verifyExecutableInputs(flowData);
verifyExecutableOutputs(flowData);
verifyTaskInputs(taskData);
verifyTaskPublishValues(taskData);
verifyOneLinerInputs(oneLinerTaskData);
verifySuccessResult(flowData);
}
#location 25
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithMapLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, null);
StepData fourthTask = stepsData.get(FOURTH_STEP_KEY);
Assert.assertEquals("print_other_values", fourthTask.getName());
}
|
#vulnerable code
@Test
public void testFlowWithMapLoopsWithCustomNavigation() throws Exception {
URI resource = getClass().getResource("/yaml/loops/loop_with_custom_navigation_with_map.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, null);
StepData thirdTask = stepsData.get(FOURTH_STEP_KEY);
Assert.assertEquals("print_other_values", thirdTask.getName());
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidMapStatementWithExpression() throws Exception {
ForLoopStatement statement = transformer.transform("k, v in dictionary.items()");
MapForLoopStatement mapForLoopStatement = validateMapForLoopStatement(statement);
Assert.assertEquals("k", mapForLoopStatement.getKeyName());
Assert.assertEquals("v", mapForLoopStatement.getValueName());
Assert.assertEquals("dictionary.items()", statement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidMapStatementWithExpression() throws Exception {
ForLoopStatement statement = transformer.transform("k, v in dictionary.items()");
Assert.assertEquals(ForLoopStatement.Type.MAP, statement.getType());
Assert.assertEquals("k v", statement.getVarName());
Assert.assertEquals("dictionary.items()", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Set<SystemProperty> systemProperties = new HashSet<>();
systemProperties.add(
SystemProperty.createSystemProperty("loop", "for.prop1", "for_value")
);
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, systemProperties).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Map<String, Serializable> expectedInputs = new HashMap<>();
expectedInputs.put("text", 1);
expectedInputs.put("sp_arg", "for_value");
Assert.assertEquals(expectedInputs, firstTask.getInputs());
expectedInputs.put("text", 2);
Assert.assertEquals(expectedInputs, secondTask.getInputs());
expectedInputs.put("text", 3);
Assert.assertEquals(expectedInputs, thirdTask.getInputs());
}
|
#vulnerable code
@Test
public void testFlowWithLoops() throws Exception {
URI resource = getClass().getResource("/yaml/loops/simple_loop.sl").toURI();
URI operation1 = getClass().getResource("/yaml/loops/print.sl").toURI();
Set<SlangSource> path = Sets.newHashSet(SlangSource.fromFile(operation1));
CompilationArtifact compilationArtifact = slang.compile(SlangSource.fromFile(resource), path);
Map<String, Serializable> userInputs = new HashMap<>();
Map<String, StepData> stepsData = triggerWithData(compilationArtifact, userInputs, EMPTY_SET).getTasks();
StepData firstTask = stepsData.get(FIRST_STEP_PATH);
StepData secondTask = stepsData.get(SECOND_STEP_KEY);
StepData thirdTask = stepsData.get(THIRD_STEP_KEY);
Assert.assertTrue(firstTask.getInputs().containsValue(1));
Assert.assertTrue(secondTask.getInputs().containsValue(2));
Assert.assertTrue(thirdTask.getInputs().containsValue(3));
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testValidStatementWithSpaces() throws Exception {
ForLoopStatement statement = transformer.transform("x in range(0, 9)");
ListForLoopStatement listForLoopStatement = validateListForLoopStatement(statement);
Assert.assertEquals("x", listForLoopStatement.getVarName());
Assert.assertEquals("range(0, 9)", listForLoopStatement.getCollectionExpression());
}
|
#vulnerable code
@Test
public void testValidStatementWithSpaces() throws Exception {
ForLoopStatement statement = transformer.transform("x in range(0, 9)");
Assert.assertEquals(ForLoopStatement.Type.LIST, statement.getType());
Assert.assertEquals("x", statement.getVarName());
Assert.assertEquals("range(0, 9)", statement.getCollectionExpression());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testRetryForResultAndThrowable() {
// Given retry for null
exec = new AsyncExecution(callable, new RetryPolicy().retryWhen(null), scheduler, future, null);
// When / Then
assertFalse(exec.complete(null));
assertTrue(exec.retryFor(null, null));
exec.prepare();
assertTrue(exec.retryFor(1, new IllegalArgumentException()));
exec.prepare();
assertFalse(exec.retryFor(1, null));
// Then
assertEquals(exec.getExecutions(), 3);
assertTrue(exec.isComplete());
assertEquals(exec.getLastResult(), Integer.valueOf(1));
assertNull(exec.getLastFailure());
verifyScheduler(2);
verify(future).complete(1, null, true);
// Given 2 max retries
exec = new AsyncExecution(callable, new RetryPolicy().retryWhen(null).withMaxRetries(1), scheduler, future, null);
// When / Then
resetMocks();
assertFalse(exec.complete(null));
assertTrue(exec.retryFor(null, e));
exec.prepare();
assertFalse(exec.retryFor(null, e));
// Then
assertEquals(exec.getExecutions(), 2);
assertTrue(exec.isComplete());
assertNull(exec.getLastResult());
assertEquals(exec.getLastFailure(), e);
verifyScheduler(1);
verify(future).complete(null, e, false);
}
|
#vulnerable code
public void testRetryForResultAndThrowable() {
// Given retry for null
inv = new AsyncExecution(callable, new RetryPolicy().retryWhen(null), scheduler, future, null);
// When / Then
assertFalse(inv.complete(null));
assertTrue(inv.retryFor(null, null));
inv.prepare();
assertTrue(inv.retryFor(1, new IllegalArgumentException()));
inv.prepare();
assertFalse(inv.retryFor(1, null));
// Then
assertEquals(inv.getExecutions(), 3);
assertTrue(inv.isComplete());
assertEquals(inv.getLastResult(), Integer.valueOf(1));
assertNull(inv.getLastFailure());
verifyScheduler(2);
verify(future).complete(1, null, true);
// Given 2 max retries
inv = new AsyncExecution(callable, new RetryPolicy().retryWhen(null).withMaxRetries(1), scheduler, future, null);
// When / Then
resetMocks();
assertFalse(inv.complete(null));
assertTrue(inv.retryFor(null, e));
inv.prepare();
assertFalse(inv.retryFor(null, e));
// Then
assertEquals(inv.getExecutions(), 2);
assertTrue(inv.isComplete());
assertNull(inv.getLastResult());
assertEquals(inv.getLastFailure(), e);
verifyScheduler(1);
verify(future).complete(null, e, false);
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testCompleteOrRetry() {
// Given retry on IllegalArgumentException
exec = new AsyncExecution(callable, scheduler, future, configFor(new RetryPolicy()));
// When / Then
exec.completeOrRetry(null, e);
assertFalse(exec.isComplete());
exec.before();
exec.completeOrRetry(null, null);
// Then
assertEquals(exec.getExecutions(), 2);
assertTrue(exec.isComplete());
assertNull(exec.getLastResult());
assertNull(exec.getLastFailure());
verifyScheduler(1);
verify(future).complete(null, null, null, true);
}
|
#vulnerable code
public void testCompleteOrRetry() {
// Given retry on IllegalArgumentException
exec = new AsyncExecution(callable, scheduler, future, configFor(new RetryPolicy()));
// When / Then
exec.completeOrRetry(null, e);
assertFalse(exec.isComplete());
exec.before();
exec.completeOrRetry(null, null);
// Then
assertEquals(exec.getExecutions(), 2);
assertTrue(exec.isComplete());
assertNull(exec.getLastResult());
assertNull(exec.getLastFailure());
verifyScheduler(1);
verify(future).complete(null, null, null);
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
// check uncommitted changes
checkUncommittedChanges();
// git for-each-ref --format='%(refname:short)' refs/heads/hotfix/*
final String hotfixBranches = executeGitCommandReturn(
"for-each-ref", "--format=\"%(refname:short)\"",
"refs/heads/" + gitFlowConfig.getHotfixBranchPrefix() + "*");
if (StringUtils.isBlank(hotfixBranches)) {
throw new MojoFailureException("There are no hotfix branches.");
}
String[] branches = hotfixBranches.split("\\r?\\n");
List<String> numberedList = new ArrayList<String>();
StringBuffer str = new StringBuffer(
"hotfix branch name to finish: [");
for (int i = 0; i < branches.length; i++) {
str.append((i + 1) + ". " + branches[i] + " ");
numberedList.add("" + (i + 1));
}
str.append("]");
String hotfixNumber = null;
try {
while (StringUtils.isBlank(hotfixNumber)) {
hotfixNumber = prompter
.prompt(str.toString(), numberedList);
}
} catch (PrompterException e) {
getLog().error(e);
}
String hotfixName = null;
if (hotfixNumber != null) {
int num = Integer.parseInt(hotfixNumber);
hotfixName = branches[num - 1];
}
if (StringUtils.isBlank(hotfixName)) {
throw new MojoFailureException(
"Hotfix name to finish is blank.");
}
// git checkout master
executeGitCommand("checkout", gitFlowConfig.getProductionBranch());
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
// git tag -a ...
executeGitCommand(
"tag",
"-a",
gitFlowConfig.getVersionTagPrefix()
+ hotfixName.replaceFirst(
gitFlowConfig.getHotfixBranchPrefix(), ""),
"-m", "tagging hotfix");
// check whether release branch exists
// git for-each-ref --count=1 --format="%(refname:short)"
// refs/heads/release/*
final String releaseBranch = executeGitCommandReturn(
"for-each-ref", "--count=1",
"--format=\"%(refname:short)\"", "refs/heads/"
+ gitFlowConfig.getReleaseBranchPrefix() + "*");
// if release branch exists merge hotfix changes into it
if (StringUtils.isNotBlank(releaseBranch)) {
// git checkout release
executeGitCommand("checkout", releaseBranch);
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
} else {
// git checkout develop
executeGitCommand("checkout",
gitFlowConfig.getDevelopmentBranch());
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
// get current project version from pom
String currentVersion = getCurrentProjectVersion();
String nextSnapshotVersion = null;
// get next snapshot version
try {
DefaultVersionInfo versionInfo = new DefaultVersionInfo(
currentVersion);
nextSnapshotVersion = versionInfo.getNextVersion()
.getSnapshotVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
if (StringUtils.isBlank(nextSnapshotVersion)) {
throw new MojoFailureException(
"Next snapshot version is blank.");
}
// mvn versions:set -DnewVersion=... -DgenerateBackupPoms=false
executeMvnCommand(VERSIONS_MAVEN_PLUGIN + ":set",
"-DnewVersion=" + nextSnapshotVersion,
"-DgenerateBackupPoms=false");
// git commit -a -m updating poms for next development version
executeGitCommand("commit", "-a", "-m",
"updating poms for next development version");
}
// git branch -d hotfix/...
executeGitCommand("branch", "-d", hotfixName);
} catch (CommandLineException e) {
e.printStackTrace();
}
}
|
#vulnerable code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
// check uncommitted changes
checkUncommittedChanges();
// git for-each-ref --format='%(refname:short)' refs/heads/hotfix/*
final String hotfixBranches = executeGitCommandReturn(
"for-each-ref", "--format=\"%(refname:short)\"",
"refs/heads/" + gitFlowConfig.getHotfixBranchPrefix() + "*");
if (StringUtils.isBlank(hotfixBranches)) {
throw new MojoFailureException("There are no hotfix branches.");
}
String[] branches = hotfixBranches.split("\\r?\\n");
List<String> numberedList = new ArrayList<String>();
StringBuffer str = new StringBuffer(
"hotfix branch name to finish: [");
for (int i = 0; i < branches.length; i++) {
str.append((i + 1) + ". " + branches[i] + " ");
numberedList.add("" + (i + 1));
}
str.append("]");
String hotfixNumber = null;
try {
while (StringUtils.isBlank(hotfixNumber)) {
hotfixNumber = prompter
.prompt(str.toString(), numberedList);
}
} catch (PrompterException e) {
getLog().error(e);
}
String hotfixName = null;
if (hotfixNumber != null) {
int num = Integer.parseInt(hotfixNumber);
hotfixName = branches[num - 1];
}
if (StringUtils.isBlank(hotfixName)) {
throw new MojoFailureException(
"Hotfix name to finish is blank.");
}
// git checkout master
executeGitCommand("checkout", gitFlowConfig.getProductionBranch());
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
// git tag -a ...
executeGitCommand(
"tag",
"-a",
gitFlowConfig.getVersionTagPrefix()
+ hotfixName.replaceFirst(
gitFlowConfig.getHotfixBranchPrefix(), ""),
"-m", "tagging hotfix");
// check whether release branch exists
// git for-each-ref --count=1 --format="%(refname:short)"
// refs/heads/release/*
final String releaseBranch = executeGitCommandReturn(
"for-each-ref", "--count=1",
"--format=\"%(refname:short)\"", "refs/heads/"
+ gitFlowConfig.getReleaseBranchPrefix() + "*");
// if release branch exists merge hotfix changes into it
if (StringUtils.isNotBlank(releaseBranch)) {
// git checkout release
executeGitCommand("checkout", releaseBranch);
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
} else {
// git checkout develop
executeGitCommand("checkout",
gitFlowConfig.getDevelopmentBranch());
// git merge --no-ff hotfix/...
executeGitCommand("merge", "--no-ff", hotfixName);
String nextSnapshotVersion = null;
// get next snapshot version
try {
DefaultVersionInfo versionInfo = new DefaultVersionInfo(
project.getVersion());
nextSnapshotVersion = versionInfo.getNextVersion()
.getSnapshotVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
if (StringUtils.isBlank(nextSnapshotVersion)) {
throw new MojoFailureException(
"Next snapshot version is blank.");
}
// mvn versions:set -DnewVersion=... -DgenerateBackupPoms=false
executeMvnCommand(VERSIONS_MAVEN_PLUGIN + ":set",
"-DnewVersion=" + nextSnapshotVersion,
"-DgenerateBackupPoms=false");
// git commit -a -m updating poms for next development version
executeGitCommand("commit", "-a", "-m",
"updating poms for next development version");
}
// git branch -d hotfix/...
executeGitCommand("branch", "-d", hotfixName);
} catch (CommandLineException e) {
e.printStackTrace();
}
}
#location 59
#vulnerability type NULL_DEREFERENCE
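A pattern worth calling out, since it recurs in several of the Maven mojo records here: a version string is computed inside a try/catch whose catch block only logs, so the variable can still be null when control reaches the dereference, and the patch adds a blank-check that fails fast. A minimal, self-contained sketch of that shape, assuming a hypothetical nextSnapshot parser in place of the real DefaultVersionInfo:
public class SwallowedExceptionGuard {
    static class VersionParseException extends Exception {}
    // Hypothetical parser standing in for DefaultVersionInfo: throws on bad input.
    static String nextSnapshot(String version) throws VersionParseException {
        if (version == null || !version.matches("\\d+(\\.\\d+)*")) {
            throw new VersionParseException();
        }
        return version + "-SNAPSHOT";
    }
    public static void main(String[] args) {
        String nextSnapshotVersion = null;
        try {
            nextSnapshotVersion = nextSnapshot("1.2.3"); // pass "bad" to see the guard fire
        } catch (VersionParseException e) {
            // Only logging here leaves nextSnapshotVersion null on bad input.
        }
        // Vulnerable shape: dereferencing nextSnapshotVersion directly risks an NPE.
        // Patched shape: fail fast with a clear message instead.
        if (nextSnapshotVersion == null || nextSnapshotVersion.isEmpty()) {
            throw new IllegalStateException("Next snapshot version is blank.");
        }
        System.out.println(nextSnapshotVersion); // 1.2.3-SNAPSHOT
    }
}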
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
// set git flow configuration
initGitFlowConfig();
// check uncommitted changes
checkUncommittedChanges();
// check snapshots dependencies
if (!allowSnapshots) {
checkSnapshotDependencies();
}
// fetch and check remote
if (fetchRemote) {
if (notSameProdDevName()) {
gitFetchRemoteAndCompare(gitFlowConfig
.getDevelopmentBranch());
}
gitFetchRemoteAndCompare(gitFlowConfig.getProductionBranch());
}
// git for-each-ref --count=1 refs/heads/release/*
final String releaseBranch = gitFindBranches(
gitFlowConfig.getReleaseBranchPrefix(), true);
if (StringUtils.isNotBlank(releaseBranch)) {
throw new MojoFailureException(
"Release branch already exists. Cannot start release.");
}
// need to be in develop to get correct project version
// git checkout develop
gitCheckout(gitFlowConfig.getDevelopmentBranch());
if (!skipTestProject) {
// mvn clean test
mvnCleanTest();
}
// get current project version from pom
final String currentVersion = getCurrentProjectVersion();
String defaultVersion = null;
if (tychoBuild) {
defaultVersion = currentVersion;
} else {
// get default release version
try {
final DefaultVersionInfo versionInfo = new DefaultVersionInfo(
currentVersion);
defaultVersion = versionInfo.getReleaseVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
}
if (defaultVersion == null) {
throw new MojoFailureException(
"Cannot get default project version.");
}
String version = null;
if (settings.isInteractiveMode()) {
try {
version = prompter.prompt("What is release version? ["
+ defaultVersion + "]");
} catch (PrompterException e) {
getLog().error(e);
}
} else {
version = releaseVersion;
}
if (StringUtils.isBlank(version)) {
version = defaultVersion;
}
// execute if version changed
if (!version.equals(currentVersion)) {
// mvn set version
mvnSetVersions(version);
// git commit -a -m updating versions for release
gitCommit(commitMessages.getReleaseStartMessage());
}
if (notSameProdDevName()) {
// git checkout master
gitCheckout(gitFlowConfig.getProductionBranch());
gitMerge(gitFlowConfig.getDevelopmentBranch(), releaseRebase,
releaseMergeNoFF);
}
if (!skipTag) {
if (tychoBuild && ArtifactUtils.isSnapshot(version)) {
version = version.replace("-" + Artifact.SNAPSHOT_VERSION,
"");
}
// git tag -a ...
gitTag(gitFlowConfig.getVersionTagPrefix() + version,
commitMessages.getTagReleaseMessage());
}
if (notSameProdDevName()) {
// git checkout develop
gitCheckout(gitFlowConfig.getDevelopmentBranch());
}
String nextSnapshotVersion = null;
// get next snapshot version
try {
final DefaultVersionInfo versionInfo = new DefaultVersionInfo(
version);
nextSnapshotVersion = versionInfo.getNextVersion()
.getSnapshotVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
if (StringUtils.isBlank(nextSnapshotVersion)) {
throw new MojoFailureException(
"Next snapshot version is blank.");
}
// mvn set version
mvnSetVersions(nextSnapshotVersion);
// git commit -a -m updating for next development version
gitCommit(commitMessages.getReleaseFinishMessage());
if (installProject) {
// mvn clean install
mvnCleanInstall();
}
if (pushRemote) {
gitPush(gitFlowConfig.getProductionBranch(), !skipTag);
if (notSameProdDevName()) {
gitPush(gitFlowConfig.getDevelopmentBranch(), !skipTag);
}
}
} catch (CommandLineException e) {
getLog().error(e);
}
}
|
#vulnerable code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
try {
// set git flow configuration
initGitFlowConfig();
// check uncommitted changes
checkUncommittedChanges();
// check snapshots dependencies
if (!allowSnapshots) {
checkSnapshotDependencies();
}
// fetch and check remote
if (fetchRemote) {
if (notSameProdDevName()) {
gitFetchRemoteAndCompare(gitFlowConfig
.getDevelopmentBranch());
}
gitFetchRemoteAndCompare(gitFlowConfig.getProductionBranch());
}
// git for-each-ref --count=1 refs/heads/release/*
final String releaseBranch = gitFindBranches(
gitFlowConfig.getReleaseBranchPrefix(), true);
if (StringUtils.isNotBlank(releaseBranch)) {
throw new MojoFailureException(
"Release branch already exists. Cannot start release.");
}
// need to be in develop to get correct project version
// git checkout develop
gitCheckout(gitFlowConfig.getDevelopmentBranch());
if (!skipTestProject) {
// mvn clean test
mvnCleanTest();
}
// get current project version from pom
final String currentVersion = getCurrentProjectVersion();
String defaultVersion = null;
if (tychoBuild) {
defaultVersion = currentVersion;
} else {
// get default release version
try {
final DefaultVersionInfo versionInfo = new DefaultVersionInfo(
currentVersion);
defaultVersion = versionInfo.getReleaseVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
}
if (defaultVersion == null) {
throw new MojoFailureException(
"Cannot get default project version.");
}
String version = null;
if (settings.isInteractiveMode()) {
try {
version = prompter.prompt("What is release version? ["
+ defaultVersion + "]");
} catch (PrompterException e) {
getLog().error(e);
}
}
if (StringUtils.isBlank(version)) {
version = defaultVersion;
}
// execute if version changed
if (!version.equals(currentVersion)) {
// mvn set version
mvnSetVersions(version);
// git commit -a -m updating versions for release
gitCommit(commitMessages.getReleaseStartMessage());
}
if (notSameProdDevName()) {
// git checkout master
gitCheckout(gitFlowConfig.getProductionBranch());
gitMerge(gitFlowConfig.getDevelopmentBranch(), releaseRebase,
releaseMergeNoFF);
}
if (!skipTag) {
if (tychoBuild && ArtifactUtils.isSnapshot(version)) {
version = version.replace("-" + Artifact.SNAPSHOT_VERSION,
"");
}
// git tag -a ...
gitTag(gitFlowConfig.getVersionTagPrefix() + version,
commitMessages.getTagReleaseMessage());
}
if (notSameProdDevName()) {
// git checkout develop
gitCheckout(gitFlowConfig.getDevelopmentBranch());
}
String nextSnapshotVersion = null;
// get next snapshot version
try {
final DefaultVersionInfo versionInfo = new DefaultVersionInfo(
version);
nextSnapshotVersion = versionInfo.getNextVersion()
.getSnapshotVersionString();
} catch (VersionParseException e) {
if (getLog().isDebugEnabled()) {
getLog().debug(e);
}
}
if (StringUtils.isBlank(nextSnapshotVersion)) {
throw new MojoFailureException(
"Next snapshot version is blank.");
}
// mvn set version
mvnSetVersions(nextSnapshotVersion);
// git commit -a -m updating for next development version
gitCommit(commitMessages.getReleaseFinishMessage());
if (installProject) {
// mvn clean install
mvnCleanInstall();
}
if (pushRemote) {
gitPush(gitFlowConfig.getProductionBranch(), !skipTag);
if (notSameProdDevName()) {
gitPush(gitFlowConfig.getDevelopmentBranch(), !skipTag);
}
}
} catch (CommandLineException e) {
getLog().error(e);
}
}
#location 81
#vulnerability type NULL_DEREFERENCE
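This record's patch is essentially the one-line else branch: in non-interactive (batch) builds the prompt never runs, so every branch has to assign the version or later code sees null. A minimal sketch, with interactiveMode and releaseVersion as hypothetical stand-ins for the mojo's settings and -DreleaseVersion parameter:
import java.util.Scanner;

public class InteractiveFallback {
    // Hypothetical stand-ins for the mojo's settings and -DreleaseVersion parameter.
    static boolean interactiveMode = false;
    static String releaseVersion = "1.2.0";
    static String defaultVersion = "1.2.0";

    public static void main(String[] args) {
        String version = null;
        if (interactiveMode) {
            System.out.print("What is release version? [" + defaultVersion + "] ");
            version = new Scanner(System.in).nextLine();
        } else {
            // The patched branch: without it, batch builds leave version null
            // here and silently ignore the user-supplied release version.
            version = releaseVersion;
        }
        if (version == null || version.trim().isEmpty()) {
            version = defaultVersion;
        }
        System.out.println("Releasing " + version);
    }
}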
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
validateConfiguration();
try {
// set git flow configuration
initGitFlowConfig();
// check uncommitted changes
checkUncommittedChanges();
String tag = null;
if (settings.isInteractiveMode()) {
// get tags
String tagsStr = gitFindTags();
if (StringUtils.isBlank(tagsStr)) {
throw new MojoFailureException("There are no tags.");
}
try {
tag = prompter.prompt("Choose tag to start support branch",
Arrays.asList(tagsStr.split("\\r?\\n")));
} catch (PrompterException e) {
throw new MojoFailureException("support-start", e);
}
} else if (StringUtils.isNotBlank(tagName)) {
if (gitCheckTagExists(tagName)) {
tag = tagName;
} else {
throw new MojoFailureException("The tag '" + tagName + "' doesn't exist.");
}
} else {
getLog().info("The tagName is blank. Using the last tag.");
tag = gitFindLastTag();
}
if (StringUtils.isBlank(tag)) {
throw new MojoFailureException("Tag is blank.");
}
// git for-each-ref refs/heads/support/...
final boolean supportBranchExists = gitCheckBranchExists(gitFlowConfig
.getSupportBranchPrefix() + tag);
if (supportBranchExists) {
throw new MojoFailureException(
"Support branch with that name already exists.");
}
// git checkout -b ... tag
gitCreateAndCheckout(gitFlowConfig.getSupportBranchPrefix() + tag, tag);
if (installProject) {
// mvn clean install
mvnCleanInstall();
}
if (pushRemote) {
gitPush(gitFlowConfig.getSupportBranchPrefix() + tag, false);
}
} catch (CommandLineException e) {
throw new MojoFailureException("support-start", e);
}
}
|
#vulnerable code
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
validateConfiguration();
try {
// set git flow configuration
initGitFlowConfig();
// check uncommitted changes
checkUncommittedChanges();
// get tags
String tagsStr = gitFindTags();
if (StringUtils.isBlank(tagsStr)) {
throw new MojoFailureException("There are no tags.");
}
String tagName = null;
try {
tagName = prompter.prompt("Choose tag to start support branch",
Arrays.asList(tagsStr.split("\\r?\\n")));
} catch (PrompterException e) {
throw new MojoFailureException("support-start", e);
}
// git for-each-ref refs/heads/support/...
final boolean supportBranchExists = gitCheckBranchExists(gitFlowConfig
.getSupportBranchPrefix() + tagName);
if (supportBranchExists) {
throw new MojoFailureException(
"Support branch with that name already exists.");
}
// git checkout -b ... tag
gitCreateAndCheckout(gitFlowConfig.getSupportBranchPrefix()
+ tagName, tagName);
if (installProject) {
// mvn clean install
mvnCleanInstall();
}
if (pushRemote) {
gitPush(gitFlowConfig.getSupportBranchPrefix() + tagName, false);
}
} catch (CommandLineException e) {
throw new MojoFailureException("support-start", e);
}
}
#location 22
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
public <T, S extends Geometry> RTree<T, S> deserialize(long sizeBytes, InputStream is,
Func1<byte[], T> deserializer) throws IOException {
byte[] bytes = readFully(is, (int) sizeBytes);
Tree_ t = Tree_.getRootAsTree_(ByteBuffer.wrap(bytes));
Node_ node = t.root();
Context<T, S> context = new Context<T, S>(t.context().minChildren(),
t.context().maxChildren(), new SelectorRStar(), new SplitterRStar(),
new FactoryImmutable<T, S>());
final Node<T, S> root;
if (node.childrenLength() > 0)
root = new NonLeafFlatBuffersStatic<T, S>(node, context, deserializer);
else {
List<Entry<T, S>> entries = FlatBuffersHelper.createEntries(node, deserializer);
root = new LeafDefault<T, S>(entries, context);
}
return SerializerHelper.create(Optional.of(root), (int) t.size(), context);
}
|
#vulnerable code
public <T, S extends Geometry> RTree<T, S> deserialize(long sizeBytes, InputStream is,
Func1<byte[], T> deserializer) throws IOException {
byte[] bytes = readFully(is, (int) sizeBytes);
Tree_ t = Tree_.getRootAsTree_(ByteBuffer.wrap(bytes));
Node_ node = t.root();
Context<T, S> context = new Context<T, S>(t.context().minChildren(),
t.context().maxChildren(), new SelectorRStar(), new SplitterRStar(),
new FactoryImmutable<T, S>());
Node<T, S> root = new NonLeafFlatBuffersStatic<T, S>(node, context, deserializer);
return SerializerHelper.create(Optional.of(root), 1, context);
}
#location 9
#vulnerability type NULL_DEREFERENCE
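This record and the two createChildren records below share one shape: a serialized R-tree node is either a leaf (entries only) or an internal node (children only), and code that assumes one kind calls null-returning accessors for the other. A sketch of the dispatch the patches introduce, using a plain class in place of the generated FlatBuffers accessors:
import java.util.List;

public class LeafDispatch {
    // Simplified stand-in for a generated FlatBuffers node: exactly one of
    // 'children' or 'entries' is populated.
    static class Node {
        final List<Node> children;   // empty for leaves
        final List<String> entries;  // empty for internal nodes
        Node(List<Node> children, List<String> entries) {
            this.children = children;
            this.entries = entries;
        }
    }

    static int countEntries(Node node) {
        // Patched shape: branch on child count before choosing a representation.
        if (!node.children.isEmpty()) {
            int total = 0;
            for (Node child : node.children) {
                total += countEntries(child);
            }
            return total;
        }
        // Leaf case: reading this node as an internal one would dereference
        // null accessors in the generated code.
        return node.entries.size();
    }

    public static void main(String[] args) {
        Node leaf = new Node(List.of(), List.of("a", "b"));
        Node root = new Node(List.of(leaf), List.of());
        System.out.println(countEntries(root)); // 2
    }
}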
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings("unchecked")
static <T, S extends Geometry> List<Entry<T, S>> createEntries(Node_ node,
Func1<byte[], T> deserializer) {
List<Entry<T, S>> list = new ArrayList<Entry<T, S>>(node.entriesLength());
for (int i = 0; i < node.entriesLength(); i++) {
Entry_ entry = node.entries(i);
Geometry_ g = entry.geometry();
final Geometry geometry;
if (g.type() == GeometryType_.Box) {
Box_ b = g.box();
geometry = Rectangle.create(b.minX(), b.minY(), b.maxX(), b.maxY());
} else if (g.type() == GeometryType_.Point) {
Point_ p = g.point();
geometry = Point.create(p.x(), p.y());
} else if (g.type() == GeometryType_.Circle) {
Circle_ c = g.circle();
geometry = Circle.create(c.x(), c.y(), c.radius());
} else
throw new RuntimeException("unexpected");
ByteBuffer bb = entry.objectAsByteBuffer();
byte[] bytes = Arrays.copyOfRange(bb.array(), bb.position(), bb.limit());
list.add(EntryDefault.<T, S> entry(deserializer.call(bytes), (S) geometry));
}
return list;
}
|
#vulnerable code
@SuppressWarnings("unchecked")
static <T, S extends Geometry> List<Entry<T, S>> createEntries(Node_ node,
Func1<byte[], T> deserializer) {
List<Entry<T, S>> list = new ArrayList<Entry<T, S>>(node.entriesLength());
for (int i = 0; i < node.entriesLength(); i++) {
Entry_ entry = node.entries(i);
Geometry_ g = node.entries(i).geometry();
final Geometry geometry;
if (g.type() == GeometryType_.Box) {
Box_ b = g.box();
geometry = Rectangle.create(b.minX(), b.minY(), b.maxX(), b.maxY());
} else if (g.type() == GeometryType_.Point) {
Point_ p = g.point();
geometry = Point.create(p.x(), p.y());
} else if (g.type() == GeometryType_.Circle) {
Circle_ c = g.circle();
geometry = Circle.create(c.x(), c.y(), c.radius());
} else
throw new RuntimeException("unexpected");
ByteBuffer bb = entry.objectAsByteBuffer();
byte[] bytes = Arrays.copyOfRange(bb.array(), bb.position(), bb.limit());
list.add(EntryDefault.<T, S> entry(deserializer.call(bytes), (S) geometry));
}
return list;
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
private List<Node<T, S>> createChildren() {
List<Node<T, S>> children = new ArrayList<Node<T, S>>(node.childrenLength());
for (int i = 0; i < node.childrenLength(); i++) {
Node_ child = node.children(i);
if (child.childrenLength() > 0)
children.add(new NonLeafFlatBuffersStatic<T, S>(child, context, deserializer));
else
children.add(new LeafDefault<T, S>(
FlatBuffersHelper.<T, S> createEntries(child, deserializer), context));
}
return children;
}
|
#vulnerable code
private List<Node<T, S>> createChildren() {
List<Node<T, S>> children = new ArrayList<Node<T, S>>(node.childrenLength());
for (int i = 0; i < node.childrenLength(); i++) {
children.add(
new NonLeafFlatBuffersStatic<T, S>(node.children(i), context, deserializer));
}
return children;
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
private void roundTrip(InternalStructure structure, boolean backpressure) throws Exception {
RTree<Object, Point> tree = RTree.star().maxChildren(10).create();
tree = tree.add(GreekEarthquakes.entries()).last().toBlocking().single();
long t = System.currentTimeMillis();
File file = new File("target/file");
FileOutputStream os = new FileOutputStream(file);
Serializer<Object, Point> fbSerializer = createSerializer();
serialize(tree, t, file, os, fbSerializer);
deserialize(structure, file, fbSerializer, backpressure);
}
|
#vulnerable code
private void roundTrip(InternalStructure structure, boolean backpressure) throws Exception {
RTree<Object, Point> tree = RTree.star().maxChildren(10).create();
tree = tree.add(GreekEarthquakes.entries()).last().toBlocking().single();
long t = System.currentTimeMillis();
File file = new File("target/file");
FileOutputStream os = new FileOutputStream(file);
SerializerFlatBuffers<Object, Point> fbSerializer = createSerializer();
serialize(tree, t, file, os, fbSerializer);
deserialize(structure, file, fbSerializer, backpressure);
}
#location 9
#vulnerability type RESOURCE_LEAK
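The RESOURCE_LEAK records in this section open a stream, reader, or client on a path where it may never be closed. The patches narrow the leak in different ways; the general remedy since Java 7 is try-with-resources. A minimal sketch writing to a hypothetical temp file:
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class TryWithResources {
    public static void main(String[] args) throws IOException {
        File file = File.createTempFile("sketch", ".txt"); // hypothetical target
        // Leaky shape: a FileOutputStream assigned to a local and never closed
        // when a later call throws. Patched shape: closed on every exit path.
        try (BufferedWriter out = new BufferedWriter(new FileWriter(file))) {
            out.write("payload");
        } // out.close() runs here even if write() throws
    }
}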
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings("unchecked")
static <S extends Geometry> S toGeometry(Geometry_ g) {
final Geometry result;
byte type = g.type();
if (type == GeometryType_.Box) {
result = createBox(g.box());
} else if (type == GeometryType_.Point) {
Point_ p = g.point();
result = Geometries.point(p.x(), p.y());
} else if (type == GeometryType_.Circle) {
Circle_ c = g.circle();
result = Geometries.circle(c.x(), c.y(), c.radius());
} else if (type == GeometryType_.Line) {
result = createLine(g.line());
} else
throw new RuntimeException("unexpected");
return (S) result;
}
|
#vulnerable code
@SuppressWarnings("unchecked")
static <S extends Geometry> S toGeometry(Geometry_ g) {
final Geometry result;
byte type = g.type();
if (type == GeometryType_.Box) {
result = createBox(g.box());
} else if (type == GeometryType_.Point) {
result = Geometries.point(g.point().x(), g.point().y());
} else if (type == GeometryType_.Circle) {
Circle_ c = g.circle();
result = Geometries.circle(c.x(), c.y(), c.radius());
} else if (type == GeometryType_.Line) {
result = createLine(g.line());
} else
throw new RuntimeException("unexpected");
return (S) result;
}
#location 8
#vulnerability type NULL_DEREFERENCE
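Here the patch hoists the g.point() call into a local before dereferencing it. With generated union accessors that can return null for a non-matching variant, every repeated call is its own potential null-dereference site. A sketch with a hypothetical GeometryUnion stand-in:
public class CachedAccessor {
    // Hypothetical stand-in for a generated union accessor that returns null
    // when the union holds a different variant.
    static class GeometryUnion {
        private final double[] point; // null unless the union is a Point
        GeometryUnion(double[] point) { this.point = point; }
        double[] point() { return point; }
    }

    static String describe(GeometryUnion g) {
        // Vulnerable shape: g.point()[0] and g.point()[1] call the accessor
        // twice; each call can independently dereference null.
        // Patched shape: fetch once into a local, check, then use it.
        double[] p = g.point();
        if (p == null) {
            throw new IllegalArgumentException("union does not hold a point");
        }
        return "(" + p[0] + ", " + p[1] + ")";
    }

    public static void main(String[] args) {
        System.out.println(describe(new GeometryUnion(new double[] { 1.0, 2.0 })));
    }
}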
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
private List<Node<T, S>> createChildren() {
List<Node<T, S>> children = new ArrayList<Node<T, S>>(node.childrenLength());
// reduce allocations by reusing objects
int numChildren = node.childrenLength();
for (int i = 0; i < numChildren; i++) {
Node_ child = node.children(i);
if (child.childrenLength()>0) {
children.add(new NonLeafFlatBuffers<T, S>(child, context, deserializer));
} else {
children.add(new LeafFlatBuffers<T,S>(child, context, deserializer));
}
}
return children;
}
|
#vulnerable code
private List<Node<T, S>> createChildren() {
List<Node<T, S>> children = new ArrayList<Node<T, S>>(node.childrenLength());
// reduce allocations by reusing objects
int numChildren = node.childrenLength();
for (int i = 0; i < numChildren; i++) {
Node_ child = node.children(i);
children.add(new NonLeafFlatBuffers<T, S>(child, context, deserializer));
}
return children;
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Test
public void testSerializeRoundTrip() throws IOException {
RTree<Object, Point> tree = RTree.star().maxChildren(10).create();
tree = tree.add(GreekEarthquakes.entries()).last().toBlocking().single();
long t = System.currentTimeMillis();
File output = new File("target/file");
FileOutputStream os = new FileOutputStream(output);
FlatBuffersSerializer serializer = new FlatBuffersSerializer();
serializer.serialize(tree, new Func1<Object, byte[]>() {
@Override
public byte[] call(Object o) {
return EMPTY;
}
}, os);
os.close();
System.out.println("written in " + (System.currentTimeMillis() - t) + "ms, " + "file size="
+ output.length() / 1000000.0 + "MB");
System.out.println("bytes per entry=" + output.length() / tree.size());
InputStream is = new FileInputStream(output);
t = System.currentTimeMillis();
RTree<Object, Geometry> tr = serializer.deserialize(output.length(), is,
new Func1<byte[], Object>() {
@Override
public Object call(byte[] bytes) {
return "a";
}
});
System.out.println("read in " + (System.currentTimeMillis() - t) + "ms");
int found = tr.search(Geometries.rectangle(40, 27.0, 40.5, 27.5)).count().toBlocking()
.single();
System.out.println("found=" + found);
assertEquals(22, found);
}
|
#vulnerable code
@Test
public void testSerializeRoundTrip() throws IOException {
RTree<Object, Point> tree = RTree.star().maxChildren(10).create();
tree = tree.add(GreekEarthquakes.entries()).last().toBlocking().single();
long t = System.currentTimeMillis();
File output = new File("target/file");
FileOutputStream os = new FileOutputStream(output);
FlatBuffersSerializer serializer = new FlatBuffersSerializer();
serializer.serialize(tree, new Func1<Object, byte[]>() {
@Override
public byte[] call(Object o) {
return EMPTY;
}
}, os);
os.close();
System.out.println("written in " + (System.currentTimeMillis() - t) + "ms, " + "file size="
+ output.length() / 1000000.0 + "MB");
System.out.println("bytes per entry=" + output.length() / tree.size());
InputStream is = new FileInputStream(output);
if (false) {
RTree<Object, Geometry> tr = serializer.deserialize(is, new Func1<byte[], Object>() {
@Override
public Object call(byte[] bytes) {
return "a";
}
});
}
}
#location 20
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException {
Client client = new TransportClient().addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(new File("/home/ansj/temp/20140421.txt"))));
String temp = null;
int a = 0 ;
while ((temp = br.readLine()) != null) {
if(a++%100==0)
System.out.println(a);
try {
JSONObject job = JSON.parseObject(br.readLine());
client.prepareIndex().setIndex("testdoc").setType("testdoc").setSource(job).setTimeout("10s").execute().actionGet();
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
#vulnerable code
public static void main(String[] args) throws IOException {
Client client = new TransportClient().addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(new File("/home/ansj/temp/20140421.txt"))));
String temp = null;
while ((temp = br.readLine()) != null) {
System.out.println("in");
try {
JSONObject job = JSON.parseObject(br.readLine());
client.prepareIndex().setIndex("testdoc").setType("testdoc").setSource(job).execute().actionGet();
} catch (Exception e) {
e.printStackTrace();
}
}
}
#location 14
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
private void findOrderBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException {
SQLOrderBy orderBy = query.getOrderBy();
if (orderBy == null) {
return;
}
List<SQLSelectOrderByItem> items = orderBy.getItems();
addOrderByToSelect(select, items, null);
}
|
#vulnerable code
private void findOrderBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException {
SQLOrderBy orderBy = query.getOrderBy();
if (orderBy == null) {
return;
}
List<SQLSelectOrderByItem> items = orderBy.getItems();
List<String> lists = new ArrayList<>();
for (SQLSelectOrderByItem sqlSelectOrderByItem : items) {
SQLExpr expr = sqlSelectOrderByItem.getExpr();
lists.add(FieldMaker.makeField(expr, null,null).toString());
if (sqlSelectOrderByItem.getType() == null) {
sqlSelectOrderByItem.setType(SQLOrderingSpecification.ASC);
}
String type = sqlSelectOrderByItem.getType().toString();
for (String name : lists) {
name = name.replace("`", "");
select.addOrderBy(name, type);
}
lists.clear();
}
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
private SearchRequestBuilder explan(SQLQueryExpr SQLQueryExpr) throws SqlParseException {
Select select = new SqlParser().parseSelect(SQLQueryExpr);
Query query = null;
if (select.isAgg) {
query = new AggregationQuery(client, select);
} else {
query = new DefaultQuery(client, select);
}
return query.explan();
}
|
#vulnerable code
private SearchRequestBuilder explan(SQLQueryExpr SQLQueryExpr) throws SqlParseException {
Select select = new SqlParser().parseSelect(SQLQueryExpr);
Query query = null;
Client client = new TransportClient();
if (select.isAgg) {
query = new AggregationQuery(client, select);
} else {
query = new DefaultQuery(client, select);
}
return query.explan();
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
public IonValue singleValue(String ionText)
{
CloseableIterator<IonValue> it = iterate(ionText);
return singleValue(it);
}
|
#vulnerable code
public IonValue singleValue(String ionText)
{
Iterator<IonValue> it = iterate(ionText);
return singleValue(it);
}
#location 4
#vulnerability type RESOURCE_LEAK
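The fix here (and in the byte[] overload that follows) is purely a declared-type change: widening the iterator to plain Iterator hides close(), so the caller cannot release the underlying resource. A sketch of keeping the closeable type visible, with a hypothetical CloseableIterator interface:
import java.util.Iterator;
import java.util.List;

public class CloseableIteratorSketch {
    // Hypothetical closeable iterator wrapping some underlying resource.
    interface CloseableIterator<T> extends Iterator<T>, AutoCloseable {
        @Override void close();
    }

    static CloseableIterator<String> iterate(List<String> values) {
        Iterator<String> it = values.iterator();
        return new CloseableIterator<String>() {
            public boolean hasNext() { return it.hasNext(); }
            public String next() { return it.next(); }
            public void close() { /* release the underlying stream here */ }
        };
    }

    public static void main(String[] args) {
        // Declaring the variable as plain Iterator (the vulnerable shape)
        // loses close(); keeping the closeable type lets callers manage it.
        try (CloseableIterator<String> it = iterate(List.of("a", "b"))) {
            while (it.hasNext()) {
                System.out.println(it.next());
            }
        }
    }
}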
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
public IonValue singleValue(byte[] ionData)
{
CloseableIterator<IonValue> iterator = iterate(ionData);
return singleValue(iterator);
}
|
#vulnerable code
public IonValue singleValue(byte[] ionData)
{
Iterator<IonValue> it = iterate(ionData);
return singleValue(it);
}
#location 4
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Test
public void testFlushingUnlockedSymtab()
throws Exception
{
byte[] bytes = flushUnlockedSymtab(false);
assertEquals(0, bytes.length);
}
|
#vulnerable code
@Test
public void testFlushingUnlockedSymtab()
throws Exception
{
iw = makeWriter();
iw.writeSymbol("force a local symtab");
SymbolTable symtab = iw.getSymbolTable();
symtab.intern("fred_1");
symtab.intern("fred_2");
iw.writeSymbol("fred_1");
// This would cause an appended LST to be written before the next value.
iw.flush();
IonReader reader = IonReaderBuilder.standard().build(myOutputStream.toByteArray());
assertEquals(IonType.SYMBOL, reader.next());
assertEquals("force a local symtab", reader.stringValue());
assertEquals(IonType.SYMBOL, reader.next());
assertEquals("fred_1", reader.stringValue());
assertNull(reader.next());
}
#location 20
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
protected void testTypeAnnotationHashCode(String text, IonType type)
{
String sharedSymbolTable = "$ion_symbol_table::{imports:[{name:\"foo\", version: 1, max_id:90}]}";
checkType(type, oneValue("annot1::" + text));
checkType(type, oneValue(sharedSymbolTable + "$99::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::" + text),
oneValue("annot2::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::annot2::" + text),
oneValue("annot1::annot2::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::annot2::annot3::" + text),
oneValue("annot1::annot2::annot3::" + text));
assertIonEqImpliesHashEq(oneValue(sharedSymbolTable + "$99::" + text),
oneValue(sharedSymbolTable + "$98::" + text));
assertIonEqImpliesHashEq(oneValue(sharedSymbolTable + "$99::$98::" + text),
oneValue(sharedSymbolTable + "$99::$98::" + text));
assertIonEqImpliesHashEq(oneValue(sharedSymbolTable + "$99::$98::$97::" + text),
oneValue(sharedSymbolTable + "$99::$98::$97::" + text));
assertIonNotEqImpliesHashNotEq("annot1::" + text,
"annot2::" + text);
assertIonNotEqImpliesHashNotEq("annot1::annot2::" + text,
"annot2::annot1::" + text);
assertIonNotEqImpliesHashNotEq("annot1::annot2::annot3::" + text,
"annot3::annot2::annot1::" + text);
assertIonNotEqImpliesHashNotEq(sharedSymbolTable + "$99::" + text,
sharedSymbolTable + "$98::" + text);
assertIonNotEqImpliesHashNotEq(sharedSymbolTable + "$99::$98::" + text,
sharedSymbolTable + "$98::$99::" + text);
assertIonNotEqImpliesHashNotEq(sharedSymbolTable + "$99::$98::$97::" + text,
sharedSymbolTable + "$97::$98::$99::" + text);
}
|
#vulnerable code
protected void testTypeAnnotationHashCode(String text, IonType type)
{
checkType(type, oneValue("annot1::" + text));
checkType(type, oneValue("$99::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::" + text),
oneValue("annot2::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::annot2::" + text),
oneValue("annot1::annot2::" + text));
assertIonEqImpliesHashEq(oneValue("annot1::annot2::annot3::" + text),
oneValue("annot1::annot2::annot3::" + text));
assertIonEqImpliesHashEq(oneValue("$99::" + text),
oneValue("$98::" + text));
assertIonEqImpliesHashEq(oneValue("$99::$98::" + text),
oneValue("$99::$98::" + text));
assertIonEqImpliesHashEq(oneValue("$99::$98::$97::" + text),
oneValue("$99::$98::$97::" + text));
assertIonNotEqImpliesHashNotEq("annot1::" + text,
"annot2::" + text);
assertIonNotEqImpliesHashNotEq("annot1::annot2::" + text,
"annot2::annot1::" + text);
assertIonNotEqImpliesHashNotEq("annot1::annot2::annot3::" + text,
"annot3::annot2::annot1::" + text);
assertIonNotEqImpliesHashNotEq("$99::" + text,
"$98::" + text);
assertIonNotEqImpliesHashNotEq("$99::$98::" + text,
"$98::$99::" + text);
assertIonNotEqImpliesHashNotEq("$99::$98::$97::" + text,
"$97::$98::$99::" + text);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Test
public void testGetMovieChangesList() throws MovieDbException {
LOG.info("getMovieChangesList");
List<ChangeListItem> result = instance.getChangeList(MethodBase.MOVIE, null, null, null);
assertFalse("No movie changes.", result.isEmpty());
}
|
#vulnerable code
@Test
public void testGetMovieChangesList() throws MovieDbException {
LOG.info("getMovieChangesList");
TmdbResultsList<ChangedMedia> result = instance.getChangeList(MethodBase.MOVIE, null, null, null);
assertFalse("No movie changes.", result.getResults().isEmpty());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void skillViolationAtAct1_shouldWork(){
SolutionAnalyser analyser = new SolutionAnalyser(vrp,solution, new SolutionAnalyser.DistanceCalculator() {
@Override
public double getDistance(String fromLocationId, String toLocationId) {
return vrp.getTransportCosts().getTransportCost(fromLocationId,toLocationId,0.,null,null);
}
});
VehicleRoute route = solution.getRoutes().iterator().next();
Boolean violated = analyser.hasSkillConstraintViolationAtActivity(route.getActivities().get(0), route);
assertFalse(violated);
}
|
#vulnerable code
@Test
public void skillViolationAtAct1_shouldWork(){
SolutionAnalyser analyser = new SolutionAnalyser(vrp,solution, new SolutionAnalyser.DistanceCalculator() {
@Override
public double getDistance(String fromLocationId, String toLocationId) {
return vrp.getTransportCosts().getTransportCost(fromLocationId,toLocationId,0.,null,null);
}
});
VehicleRoute route = solution.getRoutes().iterator().next();
Boolean violated = analyser.skillConstraintIsViolatedAtActivity(route.getActivities().get(0),route);
assertFalse(violated);
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Override
public void begin(VehicleRoute route) {
currentLoad = stateManager.getRouteState(route, StateFactory.LOAD_AT_BEGINNING, Capacity.class);
this.route = route;
}
|
#vulnerable code
@Override
public void begin(VehicleRoute route) {
currentLoad = (int) stateManager.getRouteState(route, StateFactory.LOAD_AT_BEGINNING).toDouble();
this.route = route;
}
#location 3
#vulnerability type NULL_DEREFERENCE
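The patch replaces a raw state lookup plus .toDouble() with a typed getter, because a route with no recorded state returns null. The same shape appears with any Map.get-style lookup; a minimal sketch using a plain HashMap:
import java.util.HashMap;
import java.util.Map;

public class NullableLookup {
    static final Map<String, Integer> routeLoad = new HashMap<>();

    static int loadAtBeginning(String routeId) {
        // Vulnerable shape: routeLoad.get(routeId).intValue() throws a
        // NullPointerException for any route without a recorded state.
        Integer load = routeLoad.get(routeId);
        // Patched shape: treat a missing state explicitly (here: empty route).
        return load != null ? load : 0;
    }

    public static void main(String[] args) {
        routeLoad.put("route-1", 7);
        System.out.println(loadAtBeginning("route-1")); // 7
        System.out.println(loadAtBeginning("route-2")); // 0, not an NPE
    }
}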
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
@Test
public void whenSolving_deliverService1_shouldBeInRoute(){
VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance();
new VrpXMLReader(vrpBuilder).read("src/test/resources/simpleProblem_iniRoutes.xml");
VehicleRoutingProblem vrp = vrpBuilder.build();
VehicleRoutingAlgorithm vra = new SchrimpfFactory().createAlgorithm(vrp);
Collection<VehicleRoutingProblemSolution> solutions = vra.searchSolutions();
VehicleRoutingProblemSolution solution = Solutions.bestOf(solutions);
SolutionPrinter.print(vrp,solution, SolutionPrinter.Print.VERBOSE);
Job job = getInitialJob("1",vrp);
assertTrue(hasActivityIn(solution,"veh1", job));
}
|
#vulnerable code
@Test
public void whenSolving_deliverService1_shouldBeInRoute(){
VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance();
new VrpXMLReader(vrpBuilder).read("src/test/resources/simpleProblem_iniRoutes.xml");
VehicleRoutingProblem vrp = vrpBuilder.build();
VehicleRoutingAlgorithm vra = new SchrimpfFactory().createAlgorithm(vrp);
Collection<VehicleRoutingProblemSolution> solutions = vra.searchSolutions();
VehicleRoutingProblemSolution solution = Solutions.bestOf(solutions);
assertTrue(hasActivityIn(solution.getRoutes().iterator().next(),"1"));
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code. Please generate the patch based on the following information.
|
#fixed code
public void read(String filename){
BufferedReader reader = getBufferedReader(filename);
String line;
Coordinate[] coords = null;
int[] demands = null;
Integer capacity = null;
List<Integer> depotIds = new ArrayList<Integer>();
boolean isCoordSection = false;
boolean isDemandSection = false;
boolean isDepotSection = false;
int dimensions = 0;
while( ( line = getLine(reader) ) != null ){
if(line.startsWith("EOF")){
break;
}
if(line.startsWith("DIMENSION")){
String[] tokens = line.split(":");
String dim = tokens[1].trim();
dimensions = Integer.parseInt(dim);
coords = new Coordinate[dimensions];
demands = new int[dimensions];
continue;
}
if(line.startsWith("CAPACITY")){
String[] tokens = line.split(":");
capacity = Integer.parseInt(tokens[1].trim());
continue;
}
if(line.startsWith("NODE_COORD_SECTION")){
isCoordSection = true;
isDemandSection = false;
isDepotSection = false;
continue;
}
if(line.startsWith("DEMAND_SECTION")){
isDemandSection = true;
isCoordSection = false;
isDepotSection = false;
continue;
}
if(line.startsWith("DEPOT_SECTION")){
isDepotSection = true;
isDemandSection = false;
isCoordSection = false;
continue;
}
if(isCoordSection){
if(coords == null) throw new IllegalStateException("DIMENSION tag missing");
String[] tokens = line.trim().split("\\s+");
coords[Integer.parseInt(tokens[0]) - 1] = Coordinate.newInstance(Double.parseDouble(tokens[1]), Double.parseDouble(tokens[2]));
continue;
}
if(isDemandSection){
if(demands == null) throw new IllegalStateException("DIMENSION tag missing");
String[] tokens = line.trim().split("\\s+");
demands[Integer.parseInt(tokens[0]) - 1] = Integer.parseInt(tokens[1]);
continue;
}
if(isDepotSection){
if(line.equals("-1")){
isDepotSection = false;
}
else{
depotIds.add(Integer.parseInt(line));
}
}
}
close(reader);
vrpBuilder.setFleetSize(VehicleRoutingProblem.FleetSize.INFINITE);
for(Integer depotId : depotIds){
VehicleTypeImpl type = VehicleTypeImpl.Builder.newInstance("typeId").addCapacityDimension(0,capacity).build();
VehicleImpl vehicle = VehicleImpl.Builder.newInstance("vehicle").setStartLocationId(depotId.toString())
.setStartLocationCoordinate(coords[depotId - 1]).setType(type).build();
vrpBuilder.addVehicle(vehicle);
}
for (int i = 0; i < coords.length; i++) {
String id = "" + (i + 1);
if(depotIds.isEmpty()){
if(i==0) {
VehicleImpl vehicle = VehicleImpl.Builder.newInstance("start")
.setStartLocation(Location.Builder.newInstance().setId(id)
.setCoordinate(coords[i]).build())
.build();
vrpBuilder.addVehicle(vehicle);
continue;
}
}
Service service = Service.Builder.newInstance(id)
.setLocation(Location.Builder.newInstance().setId(id).setCoordinate(coords[i]).build())
.addSizeDimension(0, demands[i]).build();
vrpBuilder.addJob(service);
}
}
|
#vulnerable code
public void read(String filename){
BufferedReader reader = getBufferedReader(filename);
String line;
Coordinate[] coords = null;
Integer[] demands = null;
Integer capacity = null;
List<Integer> depotIds = new ArrayList<Integer>();
boolean isCoordSection = false;
boolean isDemandSection = false;
boolean isDepotSection = false;
while( ( line = getLine(reader) ) != null ){
if(line.startsWith("DIMENSION")){
String[] tokens = line.split(":");
String dim = tokens[1].trim();
coords = new Coordinate[Integer.parseInt(dim)];
demands = new Integer[Integer.parseInt(dim)];
continue;
}
if(line.startsWith("CAPACITY")){
String[] tokens = line.split(":");
capacity = Integer.parseInt(tokens[1].trim());
continue;
}
if(line.startsWith("NODE_COORD_SECTION")){
isCoordSection = true;
isDemandSection = false;
isDepotSection = false;
continue;
}
if(line.startsWith("DEMAND_SECTION")){
isDemandSection = true;
isCoordSection = false;
isDepotSection = false;
continue;
}
if(line.startsWith("DEPOT_SECTION")){
isDepotSection = true;
isDemandSection = false;
isCoordSection = false;
continue;
}
if(isCoordSection){
if(coords == null) throw new IllegalStateException("DIMENSION tag missing");
String[] tokens = line.split("\\s+");
coords[Integer.parseInt(tokens[0]) - 1] = Coordinate.newInstance(Double.parseDouble(tokens[1]), Double.parseDouble(tokens[2]));
continue;
}
if(isDemandSection){
if(demands == null) throw new IllegalStateException("DIMENSION tag missing");
String[] tokens = line.split("\\s+");
demands[Integer.parseInt(tokens[0]) - 1] = Integer.parseInt(tokens[1]);
continue;
}
if(isDepotSection){
if(line.equals("-1")){
isDepotSection = false;
}
else{
depotIds.add(Integer.parseInt(line));
}
}
}
close(reader);
vrpBuilder.setFleetSize(VehicleRoutingProblem.FleetSize.INFINITE);
for(Integer depotId : depotIds){
VehicleTypeImpl type = VehicleTypeImpl.Builder.newInstance("typeId").addCapacityDimension(0,capacity).build();
VehicleImpl vehicle = VehicleImpl.Builder.newInstance("vehicle").setStartLocationId(depotId.toString())
.setStartLocationCoordinate(coords[depotId - 1]).setType(type).build();
vrpBuilder.addVehicle(vehicle);
}
for(int i=0;i<demands.length;i++){
if(demands[i] == 0) continue;
String id = "" + (i+1);
Service service = Service.Builder.newInstance(id).setLocationId(id).setCoord(coords[i]).addSizeDimension(0,demands[i]).build();
vrpBuilder.addJob(service);
}
}
#location 71
#vulnerability type NULL_DEREFERENCE
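The reader above allocates its coords and demands arrays from the DIMENSION header, so every section that touches them must allow for a missing header; the patch also tracks the dimension explicitly and iterates over coords rather than demands. A compact sketch of the header-dependent-array guard:
import java.util.List;

public class HeaderDependentArrays {
    // Minimal sketch of a TSPLIB-style reader: sections that use arrays
    // sized by the DIMENSION header must fail clearly if it is absent.
    public static void main(String[] args) {
        List<String> lines = List.of("DIMENSION : 2", "DEMAND_SECTION", "1 0", "2 3");
        int[] demands = null;
        boolean inDemands = false;
        for (String line : lines) {
            if (line.startsWith("DIMENSION")) {
                demands = new int[Integer.parseInt(line.split(":")[1].trim())];
            } else if (line.startsWith("DEMAND_SECTION")) {
                inDemands = true;
            } else if (inDemands) {
                if (demands == null) { // the guard the patched reader relies on
                    throw new IllegalStateException("DIMENSION tag missing");
                }
                String[] t = line.trim().split("\\s+");
                demands[Integer.parseInt(t[0]) - 1] = Integer.parseInt(t[1]);
            }
        }
        System.out.println(demands[1]); // 3
    }
}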
|