instruction
stringclasses 1
value | output
stringlengths 64
69.4k
| input
stringlengths 205
32.4k
|
---|---|---|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void test() throws Exception {
String str = "sdajkl;jlqwjqejqweq89080c中jlxczksaouwq9823djadj";
ByteArray bytes = new ByteArray(str.getBytes().length, 10);
byte[] b1 = str.getBytes();
int i = 0;
for (byte b : b1) {
bytes.set(i, b);
assertEquals(b, bytes.get(i));
i++;
}
ByteArray bytes1 = new ByteArray(str.getBytes().length - 10, 10);
ByteArray.arraycopy(bytes, 10, bytes1, 0, bytes.length - 10);
assertEquals(str.substring(10), getString(bytes1));
str = "sdajk";
ByteArray bytes2 = new ByteArray(str.getBytes().length, 10);
b1 = str.getBytes();
i = 0;
for (byte b : b1) {
bytes2.set(i, b);
assertEquals(b, bytes2.get(i));
i++;
}
assertEquals(getString(bytes2), "sdajk");
ByteArray bytes3 = new ByteArray(bytes2.length() - 1, 10);
ByteArray.arraycopy(bytes2, 1, bytes3, 0, bytes2.length() - 1);
assertEquals(str.substring(1), getString(bytes3));
}
|
#vulnerable code
@Test
public void test() throws Exception {
String str = "sdajkl;jlqwjqejqweq89080c中jlxczksaouwq9823djadj";
ByteArray bytes = new ByteArray(str.getBytes().length, 10);
byte[] b1 = str.getBytes();
int i = 0;
for (byte b : b1) {
bytes.set(i, b);
assertEquals(b, bytes.get(i));
i++;
}
ByteArray bytes1 = new ByteArray(str.getBytes().length - 10, 10);
ByteArray.arraycopy(bytes, 10, bytes1, 0, bytes.length - 10);
assertEquals(str.substring(10), new String(bytes1.first()));
str = "sdajk";
ByteArray bytes2 = new ByteArray(str.getBytes().length, 10);
b1 = str.getBytes();
i = 0;
for (byte b : b1) {
bytes2.set(i, b);
assertEquals(b, bytes2.get(i));
i++;
}
assertEquals(new String(bytes2.first()), "sdajk");
ByteArray bytes3 = new ByteArray(bytes2.length() - 1, 10);
ByteArray.arraycopy(bytes2, 1, bytes3, 0, bytes2.length() - 1);
assertEquals(str.substring(1), new String(bytes3.first()));
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException, URISyntaxException {
final OutputStream out = new BufferedOutputStream(new FileOutputStream(new File("/path/to/appendonly.aof")));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("redis://127.0.0.1:6379");
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator("redis:///path/to/appendonly.aof");
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
|
#vulnerable code
public static void main(String[] args) throws IOException {
final FileOutputStream out = new FileOutputStream(new File("./src/test/resources/appendonly.aof"));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("127.0.0.1", 6379, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator(new File("./src/test/resources/appendonly.aof"), FileType.AOF, Configuration.defaultSetting());
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
#location 44
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Event applyZSetZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueZSet o12 = new KeyStringValueZSet();
byte[] key = parser.rdbLoadEncodedStringObject().first();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
Set<ZSetEntry> zset = new LinkedHashSet<>();
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
while (zllen > 0) {
byte[] element = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
double score = Double.valueOf(new String(BaseRdbParser.StringHelper.zipListEntry(stream), CHARSET));
zllen--;
zset.add(new ZSetEntry(new String(element, CHARSET), score, element));
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o12.setValueRdbType(RDB_TYPE_ZSET_ZIPLIST);
o12.setValue(zset);
o12.setDb(db);
o12.setKey(new String(key, CHARSET));
o12.setRawKey(key);
return o12;
}
|
#vulnerable code
@Override
public Event applyZSetZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueZSet o12 = new KeyStringValueZSet();
String key = parser.rdbLoadEncodedStringObject().string;
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
Set<ZSetEntry> zset = new LinkedHashSet<>();
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
while (zllen > 0) {
String element = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
double score = Double.valueOf(BaseRdbParser.StringHelper.zipListEntry(stream));
zllen--;
zset.add(new ZSetEntry(element, score));
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o12.setValueRdbType(RDB_TYPE_ZSET_ZIPLIST);
o12.setValue(zset);
o12.setDb(db);
o12.setKey(key);
return o12;
}
#location 24
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException, URISyntaxException {
final OutputStream out = new BufferedOutputStream(new FileOutputStream(new File("/path/to/dump.rdb")));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save rdb from remote server
Replicator replicator = new RedisReplicator("redis://127.0.0.1:6379");
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
replicator.addRawByteListener(rawByteListener);
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.removeRawByteListener(rawByteListener);
try {
out.close();
replicator.close();
} catch (IOException ignore) {
}
}
});
replicator.open();
//check rdb file
replicator = new RedisReplicator("redis:///path/to/dump.rdb");
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
}
|
#vulnerable code
public static void main(String[] args) throws IOException {
final FileOutputStream out = new FileOutputStream(new File("./src/test/resources/dump.rdb"));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save rdb from remote server
Replicator replicator = new RedisReplicator("127.0.0.1", 6379, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
replicator.addRawByteListener(rawByteListener);
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.removeRawByteListener(rawByteListener);
try {
out.close();
replicator.close();
} catch (IOException ignore) {
}
}
});
replicator.open();
//check rdb file
replicator = new RedisReplicator(new File("./src/test/resources/dump.rdb"), FileType.RDB, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
}
#location 36
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void close() {
if (!connected.compareAndSet(true, false)) return;
synchronized (this) {
if (heartBeat != null) {
heartBeat.cancel();
heartBeat = null;
logger.info("heart beat canceled.");
}
}
try {
if (inputStream != null) inputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (outputStream != null) outputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (socket != null && !socket.isClosed()) socket.close();
} catch (IOException e) {
//NOP
}
logger.info("channel closed");
}
|
#vulnerable code
@Override
public void close() {
if (!connected.compareAndSet(true, false)) return;
if (heartBeat != null) {
heartBeat.cancel();
heartBeat = null;
logger.info("heart beat canceled.");
}
try {
if (inputStream != null) inputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (outputStream != null) outputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (socket != null && !socket.isClosed()) socket.close();
} catch (IOException e) {
//NOP
}
logger.info("channel closed");
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void open() throws IOException {
try {
doOpen();
} finally {
close();
}
}
|
#vulnerable code
@Override
public void open() throws IOException {
for (int i = 0; i < configuration.getRetries() || configuration.getRetries() <= 0; i++) {
try {
connect();
if (configuration.getAuthPassword() != null) auth(configuration.getAuthPassword());
sendSlavePort();
sendSlaveIp();
sendSlaveCapa();
//reset retries
i = 0;
logger.info("PSYNC " + configuration.getMasterRunId() + " " + String.valueOf(configuration.getOffset()));
send("PSYNC".getBytes(), configuration.getMasterRunId().getBytes(), String.valueOf(configuration.getOffset()).getBytes());
final String reply = (String) reply();
SyncMode syncMode = trySync(reply);
//bug fix.
if (syncMode == SyncMode.PSYNC && connected.get()) {
//heart beat send REPLCONF ACK ${slave offset}
synchronized (this) {
heartBeat = new Timer("heart beat");
//bug fix. in this point closed by other thread. multi-thread issue
heartBeat.schedule(new TimerTask() {
@Override
public void run() {
try {
send("REPLCONF".getBytes(), "ACK".getBytes(), String.valueOf(configuration.getOffset()).getBytes());
} catch (IOException e) {
//NOP
}
}
}, configuration.getHeartBeatPeriod(), configuration.getHeartBeatPeriod());
logger.info("heart beat started.");
}
}
//sync command
while (connected.get()) {
Object obj = replyParser.parse(new OffsetHandler() {
@Override
public void handle(long len) {
configuration.addOffset(len);
}
});
//command
if (obj instanceof Object[]) {
if (configuration.isVerbose() && logger.isDebugEnabled())
logger.debug(Arrays.deepToString((Object[]) obj));
Object[] command = (Object[]) obj;
CommandName cmdName = CommandName.name((String) command[0]);
Object[] params = new Object[command.length - 1];
System.arraycopy(command, 1, params, 0, params.length);
final CommandParser<? extends Command> operations;
//if command do not register. ignore
if ((operations = commands.get(cmdName)) == null) continue;
//do command replyParser
Command parsedCommand = operations.parse(cmdName, params);
//submit event
this.submitEvent(parsedCommand);
} else {
if (logger.isInfoEnabled()) logger.info("Redis reply:" + obj);
}
}
//connected = false
break;
} catch (/*bug fix*/IOException e) {
//close socket manual
if (!connected.get()) {
break;
}
logger.error("socket error", e);
//connect refused
//connect timeout
//read timeout
//connect abort
//server disconnect connection EOFException
close();
//retry psync in next loop.
logger.info("reconnect to redis-server. retry times:" + (i + 1));
try {
Thread.sleep(configuration.getRetryTimeInterval());
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
}
}
doCloseListener();
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Event applyListZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o10 = new KeyStringValueList();
byte[] key = parser.rdbLoadEncodedStringObject().first();
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
List<String> list = new ArrayList<>();
List<byte[]> rawList = new ArrayList<>();
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int i = 0; i < zllen; i++) {
byte[] e = BaseRdbParser.StringHelper.zipListEntry(stream);
list.add(new String(e, CHARSET));
rawList.add(e);
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o10.setValueRdbType(RDB_TYPE_LIST_ZIPLIST);
o10.setValue(list);
o10.setRawValue(rawList);
o10.setDb(db);
o10.setKey(new String(key, CHARSET));
o10.setRawKey(key);
return o10;
}
|
#vulnerable code
@Override
public Event applyListZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o10 = new KeyStringValueList();
String key = parser.rdbLoadEncodedStringObject().string;
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
List<String> list = new ArrayList<>();
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int i = 0; i < zllen; i++) {
list.add(BaseRdbParser.StringHelper.zipListEntry(stream));
}
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o10.setValueRdbType(RDB_TYPE_LIST_ZIPLIST);
o10.setValue(list);
o10.setDb(db);
o10.setKey(key);
return o10;
}
#location 20
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testCloseListener1() throws IOException, InterruptedException {
final AtomicInteger acc = new AtomicInteger(0);
Replicator replicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV6.rdb"), FileType.RDB,
Configuration.defaultSetting());
replicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testCloseListener1");
acc.incrementAndGet();
assertEquals(1, acc.get());
}
});
replicator.open();
}
|
#vulnerable code
@Test
public void testCloseListener1() throws IOException, InterruptedException {
final AtomicInteger acc = new AtomicInteger(0);
Replicator replicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV6.rdb"),
Configuration.defaultSetting());
replicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testCloseListener1");
acc.incrementAndGet();
assertEquals(1, acc.get());
}
});
replicator.open();
}
#location 15
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testChecksumV7() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV7.rdb"), FileType.RDB,
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
final AtomicLong atomicChecksum = new AtomicLong(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
acc.incrementAndGet();
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
atomicChecksum.compareAndSet(0, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testChecksumV7");
assertEquals(19, acc.get());
assertEquals(6576517133597126869L, atomicChecksum.get());
}
});
redisReplicator.open();
}
|
#vulnerable code
@Test
public void testChecksumV7() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV7.rdb"),
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
final AtomicLong atomicChecksum = new AtomicLong(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
acc.incrementAndGet();
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
atomicChecksum.compareAndSet(0, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testChecksumV7");
assertEquals(19, acc.get());
assertEquals(6576517133597126869L, atomicChecksum.get());
}
});
redisReplicator.open();
}
#location 28
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException, URISyntaxException {
final OutputStream out = new BufferedOutputStream(new FileOutputStream(new File("/path/to/dump.rdb")));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save rdb from remote server
Replicator replicator = new RedisReplicator("redis://127.0.0.1:6379");
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
replicator.addRawByteListener(rawByteListener);
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.removeRawByteListener(rawByteListener);
try {
out.close();
replicator.close();
} catch (IOException ignore) {
}
}
});
replicator.open();
//check rdb file
replicator = new RedisReplicator("redis:///path/to/dump.rdb");
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
}
|
#vulnerable code
public static void main(String[] args) throws IOException {
final FileOutputStream out = new FileOutputStream(new File("./src/test/resources/dump.rdb"));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save rdb from remote server
Replicator replicator = new RedisReplicator("127.0.0.1", 6379, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
replicator.addRawByteListener(rawByteListener);
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
replicator.removeRawByteListener(rawByteListener);
try {
out.close();
replicator.close();
} catch (IOException ignore) {
}
}
});
replicator.open();
//check rdb file
replicator = new RedisReplicator(new File("./src/test/resources/dump.rdb"), FileType.RDB, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
}
#location 36
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void open() throws IOException {
try {
doOpen();
} finally {
close();
}
}
|
#vulnerable code
@Override
public void open() throws IOException {
for (int i = 0; i < configuration.getRetries() || configuration.getRetries() <= 0; i++) {
try {
connect();
if (configuration.getAuthPassword() != null) auth(configuration.getAuthPassword());
sendSlavePort();
sendSlaveIp();
sendSlaveCapa();
//reset retries
i = 0;
logger.info("PSYNC " + configuration.getMasterRunId() + " " + String.valueOf(configuration.getOffset()));
send("PSYNC".getBytes(), configuration.getMasterRunId().getBytes(), String.valueOf(configuration.getOffset()).getBytes());
final String reply = (String) reply();
SyncMode syncMode = trySync(reply);
//bug fix.
if (syncMode == SyncMode.PSYNC && connected.get()) {
//heart beat send REPLCONF ACK ${slave offset}
synchronized (this) {
heartBeat = new Timer("heart beat");
//bug fix. in this point closed by other thread. multi-thread issue
heartBeat.schedule(new TimerTask() {
@Override
public void run() {
try {
send("REPLCONF".getBytes(), "ACK".getBytes(), String.valueOf(configuration.getOffset()).getBytes());
} catch (IOException e) {
//NOP
}
}
}, configuration.getHeartBeatPeriod(), configuration.getHeartBeatPeriod());
logger.info("heart beat started.");
}
}
//sync command
while (connected.get()) {
Object obj = replyParser.parse(new OffsetHandler() {
@Override
public void handle(long len) {
configuration.addOffset(len);
}
});
//command
if (obj instanceof Object[]) {
if (configuration.isVerbose() && logger.isDebugEnabled())
logger.debug(Arrays.deepToString((Object[]) obj));
Object[] command = (Object[]) obj;
CommandName cmdName = CommandName.name((String) command[0]);
Object[] params = new Object[command.length - 1];
System.arraycopy(command, 1, params, 0, params.length);
final CommandParser<? extends Command> operations;
//if command do not register. ignore
if ((operations = commands.get(cmdName)) == null) continue;
//do command replyParser
Command parsedCommand = operations.parse(cmdName, params);
//submit event
this.submitEvent(parsedCommand);
} else {
if (logger.isInfoEnabled()) logger.info("Redis reply:" + obj);
}
}
//connected = false
break;
} catch (/*bug fix*/IOException e) {
//close socket manual
if (!connected.get()) {
break;
}
logger.error("socket error", e);
//connect refused
//connect timeout
//read timeout
//connect abort
//server disconnect connection EOFException
close();
//retry psync in next loop.
logger.info("reconnect to redis-server. retry times:" + (i + 1));
try {
Thread.sleep(configuration.getRetryTimeInterval());
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
}
}
doCloseListener();
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testFilter() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV7.rdb"), FileType.RDB,
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void preFullSync(Replicator replicator) {
super.preFullSync(replicator);
assertEquals(0, acc.get());
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
if (kv.getValueRdbType() == 0) {
acc.incrementAndGet();
}
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testFilter");
assertEquals(13, acc.get());
}
});
redisReplicator.open();
}
|
#vulnerable code
@Test
public void testFilter() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV7.rdb"),
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void preFullSync(Replicator replicator) {
super.preFullSync(replicator);
assertEquals(0, acc.get());
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
if (kv.getValueRdbType() == 0) {
acc.incrementAndGet();
}
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testFilter");
assertEquals(13, acc.get());
}
});
redisReplicator.open();
}
#location 33
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void notify(byte... bytes) {
if (rawByteListeners == null || rawByteListeners.isEmpty()) return;
for (RawByteListener listener : rawByteListeners) {
listener.handle(bytes);
}
}
|
#vulnerable code
protected void notify(byte... bytes) {
if (listeners == null || listeners.isEmpty()) return;
for (RawByteListener listener : listeners) {
listener.handle(bytes);
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void open() throws IOException {
try {
doOpen();
} finally {
close();
}
}
|
#vulnerable code
@Override
public void open() throws IOException {
for (int i = 0; i < configuration.getRetries() || configuration.getRetries() <= 0; i++) {
try {
connect();
if (configuration.getAuthPassword() != null) auth(configuration.getAuthPassword());
sendSlavePort();
sendSlaveIp();
sendSlaveCapa();
//reset retries
i = 0;
logger.info("PSYNC " + configuration.getMasterRunId() + " " + String.valueOf(configuration.getOffset()));
send("PSYNC".getBytes(), configuration.getMasterRunId().getBytes(), String.valueOf(configuration.getOffset()).getBytes());
final String reply = (String) reply();
SyncMode syncMode = trySync(reply);
//bug fix.
if (syncMode == SyncMode.PSYNC && connected.get()) {
//heart beat send REPLCONF ACK ${slave offset}
synchronized (this) {
heartBeat = new Timer("heart beat");
//bug fix. in this point closed by other thread. multi-thread issue
heartBeat.schedule(new TimerTask() {
@Override
public void run() {
try {
send("REPLCONF".getBytes(), "ACK".getBytes(), String.valueOf(configuration.getOffset()).getBytes());
} catch (IOException e) {
//NOP
}
}
}, configuration.getHeartBeatPeriod(), configuration.getHeartBeatPeriod());
logger.info("heart beat started.");
}
}
//sync command
while (connected.get()) {
Object obj = replyParser.parse(new OffsetHandler() {
@Override
public void handle(long len) {
configuration.addOffset(len);
}
});
//command
if (obj instanceof Object[]) {
if (configuration.isVerbose() && logger.isDebugEnabled())
logger.debug(Arrays.deepToString((Object[]) obj));
Object[] command = (Object[]) obj;
CommandName cmdName = CommandName.name((String) command[0]);
Object[] params = new Object[command.length - 1];
System.arraycopy(command, 1, params, 0, params.length);
final CommandParser<? extends Command> operations;
//if command do not register. ignore
if ((operations = commands.get(cmdName)) == null) continue;
//do command replyParser
Command parsedCommand = operations.parse(cmdName, params);
//submit event
this.submitEvent(parsedCommand);
} else {
if (logger.isInfoEnabled()) logger.info("Redis reply:" + obj);
}
}
//connected = false
break;
} catch (/*bug fix*/IOException e) {
//close socket manual
if (!connected.get()) {
break;
}
logger.error("socket error", e);
//connect refused
//connect timeout
//read timeout
//connect abort
//server disconnect connection EOFException
close();
//retry psync in next loop.
logger.info("reconnect to redis-server. retry times:" + (i + 1));
try {
Thread.sleep(configuration.getRetryTimeInterval());
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
}
}
doCloseListener();
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Parses one RDB_TYPE_HASH_ZIPLIST entry: a Redis hash serialized as a
 * ziplist blob. Produces a KeyStringValueHash carrying both the decoded
 * string view and the raw byte view of the key and field/value pairs.
 */
@Override
public Event applyHashZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueHash o13 = new KeyStringValueHash();
byte[] key = parser.rdbLoadEncodedStringObject().first();
// The entire ziplist is loaded as a single string object, then re-parsed
// from an in-memory stream.
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
// LinkedHashMap preserves the on-disk field order.
Map<String, String> map = new LinkedHashMap<>();
Map<byte[], byte[]> rawMap = new LinkedHashMap<>();
// Header reads must happen in this exact order to advance the stream.
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
// Fields and values alternate, so each loop pass consumes two entries.
while (zllen > 0) {
byte[] field = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
byte[] value = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
map.put(new String(field, CHARSET), new String(value, CHARSET));
rawMap.put(field, value);
}
// Sanity-check the 0xFF ziplist terminator.
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o13.setValueRdbType(RDB_TYPE_HASH_ZIPLIST);
o13.setValue(map);
o13.setRawValue(rawMap);
o13.setDb(db);
o13.setKey(new String(key, CHARSET));
o13.setRawKey(key);
return o13;
}
|
#vulnerable code
/**
 * Parses one RDB_TYPE_HASH_ZIPLIST entry (string-only variant: no raw
 * byte view of the key or values is retained).
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * presumably the in-memory RedisInputStream below is never closed; verify
 * whether closing it matters for this stream type.
 */
@Override
public Event applyHashZipList(RedisInputStream in, DB db, int version) throws IOException {
/*
* |<zlbytes>| <zltail>| <zllen>| <entry> ...<entry> | <zlend>|
* | 4 bytes | 4 bytes | 2bytes | zipListEntry ... | 1byte |
*/
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueHash o13 = new KeyStringValueHash();
String key = parser.rdbLoadEncodedStringObject().string;
// Whole ziplist loaded into memory, then re-parsed.
ByteArray aux = parser.rdbLoadPlainStringObject();
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(aux));
// LinkedHashMap preserves the on-disk field order.
Map<String, String> map = new LinkedHashMap<>();
// Header reads must happen in this exact order to advance the stream.
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
// Fields and values alternate, so each loop pass consumes two entries.
while (zllen > 0) {
String field = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
String value = BaseRdbParser.StringHelper.zipListEntry(stream);
zllen--;
map.put(field, value);
}
// Sanity-check the 0xFF ziplist terminator.
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
o13.setValueRdbType(RDB_TYPE_HASH_ZIPLIST);
o13.setValue(map);
o13.setDb(db);
o13.setKey(key);
return o13;
}
#location 24
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Example: stream a local RDB dump with the ValueIterableRdbVisitor so
 * huge keys are delivered in batches (of 200, per the listener argument)
 * instead of being fully materialized in memory.
 */
public static void main(String[] args) throws Exception {
// URI form selects file parsing; the visitor enables chunked iteration.
Replicator r = new RedisReplicator("redis:///path/to/dump.rdb");
r.setRdbVisitor(new ValueIterableRdbVisitor(r));
r.addRdbListener(new HugeKVRdbListener(200) {
@Override
public void handleString(boolean last, byte[] key, byte[] value, int type) {
// your business code goes here.
}
@Override
public void handleModule(boolean last, byte[] key, Module value, int type) {
// your business code goes here.
}
@Override
public void handleList(boolean last, byte[] key, List<byte[]> list, int type) {
// your business code goes here.
}
@Override
public void handleZSetEntry(boolean last, byte[] key, List<ZSetEntry> list, int type) {
// your business code goes here.
}
@Override
public void handleMap(boolean last, byte[] key, List<Map.Entry<byte[], byte[]>> list, int type) {
// your business code goes here.
}
});
r.open();
}
|
#vulnerable code
/**
 * Example: stream a test RDB dump with the ValueIterableRdbVisitor so
 * huge keys are delivered in batches of 200.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * presumably the Replicator (and its underlying file stream) is never
 * closed on a failure path; verify against the Replicator contract.
 */
public static void main(String[] args) throws IOException {
Replicator r = new RedisReplicator(new File("./src/test/resources/dumpV7.rdb"), FileType.RDB, Configuration.defaultSetting());
r.setRdbVisitor(new ValueIterableRdbVisitor(r));
r.addRdbListener(new HugeKVRdbListener(200) {
@Override
public void handleString(boolean last, byte[] key, byte[] value, int type) {
// your business code goes here.
}
@Override
public void handleModule(boolean last, byte[] key, Module value, int type) {
// your business code goes here.
}
@Override
public void handleList(boolean last, byte[] key, List<byte[]> list, int type) {
// your business code goes here.
}
@Override
public void handleZSetEntry(boolean last, byte[] key, List<ZSetEntry> list, int type) {
// your business code goes here.
}
@Override
public void handleMap(boolean last, byte[] key, List<Map.Entry<byte[], byte[]>> list, int type) {
// your business code goes here.
}
});
r.open();
}
#location 30
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Parses the given RDB classpath resource and collects every key/value
 * pair into {@code map}, keyed by the Redis key.
 *
 * @param filename classpath resource name of the RDB dump to parse
 * @param map      destination map populated by the listener callback
 */
public void template(String filename, final ConcurrentHashMap<String, KeyValuePair> map) {
try {
Replicator replicator = new RedisReplicator(RdbParserTest.class.
getClassLoader().getResourceAsStream(filename)
, FileType.RDB, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
map.put(kv.getKey(), kv);
}
});
replicator.open();
} catch (Exception e) {
// Report the cause instead of failing silently, so a broken fixture
// is diagnosable from the test output.
TestCase.fail(e.getMessage());
}
}
|
#vulnerable code
/**
 * Parses the given RDB classpath resource and collects every key/value
 * pair into {@code map}, keyed by the Redis key.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * the Replicator opened here is never explicitly closed; confirm whether
 * open() releases the underlying stream on completion.
 */
public void template(String filename, final ConcurrentHashMap<String, KeyValuePair> map) {
try {
Replicator replicator = new RedisReplicator(RdbParserTest.class.
getClassLoader().getResourceAsStream(filename)
, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
map.put(kv.getKey(), kv);
}
});
replicator.open();
} catch (Exception e) {
// Swallows the cause; the test fails without any diagnostic message.
TestCase.fail();
}
}
#location 12
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Shuts the replication channel down at most once; later calls are no-ops.
 * Cancels the heartbeat timer, then best-effort closes the streams and
 * socket, swallowing close-time IOExceptions.
 */
@Override
public void close() {
// CAS guard: only the first caller that flips connected performs shutdown.
if (!connected.compareAndSet(true, false)) return;
// Synchronized so the heartbeat cannot be started concurrently while
// this thread is cancelling it.
synchronized (this) {
if (heartBeat != null) {
heartBeat.cancel();
heartBeat = null;
logger.info("heart beat canceled.");
}
}
try {
if (inputStream != null) inputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (outputStream != null) outputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (socket != null && !socket.isClosed()) socket.close();
} catch (IOException e) {
//NOP
}
logger.info("channel closed");
}
|
#vulnerable code
/**
 * Shuts the replication channel down at most once; later calls are no-ops.
 *
 * NOTE(review): this record is labeled THREAD_SAFETY_VIOLATION in the
 * dataset — heartBeat is read and nulled here without synchronization,
 * so it can race with a thread that starts the heartbeat; verify the
 * locking discipline used elsewhere for this field.
 */
@Override
public void close() {
// CAS guard: only the first caller that flips connected performs shutdown.
if (!connected.compareAndSet(true, false)) return;
if (heartBeat != null) {
heartBeat.cancel();
heartBeat = null;
logger.info("heart beat canceled.");
}
try {
if (inputStream != null) inputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (outputStream != null) outputStream.close();
} catch (IOException e) {
//NOP
}
try {
if (socket != null && !socket.isClosed()) socket.close();
} catch (IOException e) {
//NOP
}
logger.info("channel closed");
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Example: record the live replication stream of a Redis master into an
 * AOF file. Raw-byte capture starts only after the full sync finishes;
 * after 1000 commands both the file and the replicator are closed, and
 * the produced AOF is then re-parsed to print its commands.
 */
public static void main(String[] args) throws IOException, URISyntaxException {
// Buffered sink for the raw replication bytes.
final OutputStream out = new BufferedOutputStream(new FileOutputStream(new File("/path/to/appendonly.aof")));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("redis://127.0.0.1:6379");
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
// Start capturing raw bytes only once the RDB snapshot is done,
// so the AOF contains commands only.
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
// Stop after 1000 commands: flush/close the file, then the channel.
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator("redis:///path/to/appendonly.aof");
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
|
#vulnerable code
/**
 * Example: record the live replication stream of a Redis master into an
 * AOF file, then re-parse the produced file.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * the unbuffered FileOutputStream (and the second replicator) may never
 * be closed on failure paths; verify cleanup on exceptions.
 */
public static void main(String[] args) throws IOException {
final FileOutputStream out = new FileOutputStream(new File("./src/test/resources/appendonly.aof"));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("127.0.0.1", 6379, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
// Start capturing raw bytes only once the RDB snapshot is done.
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
// Stop after 1000 commands: close the file, then the channel.
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator(new File("./src/test/resources/appendonly.aof"), FileType.AOF, Configuration.defaultSetting());
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
#location 31
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Example: record the live replication stream of a Redis master into an
 * AOF file. Raw-byte capture starts only after the full sync finishes;
 * after 1000 commands both the file and the replicator are closed, and
 * the produced AOF is then re-parsed to print its commands.
 */
public static void main(String[] args) throws IOException, URISyntaxException {
// Buffered sink for the raw replication bytes.
final OutputStream out = new BufferedOutputStream(new FileOutputStream(new File("/path/to/appendonly.aof")));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("redis://127.0.0.1:6379");
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
// Start capturing raw bytes only once the RDB snapshot is done,
// so the AOF contains commands only.
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
// Stop after 1000 commands: flush/close the file, then the channel.
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator("redis:///path/to/appendonly.aof");
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
|
#vulnerable code
/**
 * Example: record the live replication stream of a Redis master into an
 * AOF file, then re-parse the produced file.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * the unbuffered FileOutputStream (and the second replicator) may never
 * be closed on failure paths; verify cleanup on exceptions.
 */
public static void main(String[] args) throws IOException {
final FileOutputStream out = new FileOutputStream(new File("./src/test/resources/appendonly.aof"));
final RawByteListener rawByteListener = new RawByteListener() {
@Override
public void handle(byte... rawBytes) {
try {
out.write(rawBytes);
} catch (IOException ignore) {
}
}
};
//save 1000 records commands
Replicator replicator = new RedisReplicator("127.0.0.1", 6379, Configuration.defaultSetting());
replicator.addRdbListener(new RdbListener() {
@Override
public void preFullSync(Replicator replicator) {
}
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
// Start capturing raw bytes only once the RDB snapshot is done.
replicator.addRawByteListener(rawByteListener);
}
});
final AtomicInteger acc = new AtomicInteger(0);
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
// Stop after 1000 commands: close the file, then the channel.
if (acc.incrementAndGet() == 1000) {
try {
out.close();
replicator.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
replicator.open();
//check aof file
replicator = new RedisReplicator(new File("./src/test/resources/appendonly.aof"), FileType.AOF, Configuration.defaultSetting());
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
#location 44
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Parses one RDB_TYPE_LIST_QUICKLIST entry: a list stored as a sequence
 * of ziplist nodes. Each node is loaded into memory, its entries decoded
 * in order, and both string and raw-byte views are accumulated.
 */
@Override
public Event applyListQuickList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o14 = new KeyStringValueList();
byte[] key = parser.rdbLoadEncodedStringObject().first();
// Number of quicklist nodes (each node is one ziplist blob).
long len = parser.rdbLoadLen().len;
List<String> stringList = new ArrayList<>();
List<byte[]> byteList = new ArrayList<>();
for (int i = 0; i < len; i++) {
ByteArray element = parser.rdbGenericLoadStringObject(RDB_LOAD_NONE);
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(element));
List<String> list = new ArrayList<>();
List<byte[]> rawList = new ArrayList<>();
// Ziplist header reads must happen in this order to advance the stream.
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int j = 0; j < zllen; j++) {
byte[] e = BaseRdbParser.StringHelper.zipListEntry(stream);
list.add(new String(e, CHARSET));
rawList.add(e);
}
// Sanity-check the 0xFF ziplist terminator.
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
stringList.addAll(list);
byteList.addAll(rawList);
}
o14.setValueRdbType(RDB_TYPE_LIST_QUICKLIST);
o14.setValue(stringList);
o14.setRawValue(byteList);
o14.setDb(db);
o14.setKey(new String(key, CHARSET));
o14.setRawKey(key);
return o14;
}
|
#vulnerable code
/**
 * Parses one RDB_TYPE_LIST_QUICKLIST entry (string-only variant).
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * presumably the per-node RedisInputStream is never closed; verify
 * whether closing matters for this in-memory stream type.
 */
@Override
public Event applyListQuickList(RedisInputStream in, DB db, int version) throws IOException {
BaseRdbParser parser = new BaseRdbParser(in);
KeyStringValueList o14 = new KeyStringValueList();
String key = parser.rdbLoadEncodedStringObject().string;
// Number of quicklist nodes (each node is one ziplist blob).
long len = parser.rdbLoadLen().len;
List<String> byteList = new ArrayList<>();
for (int i = 0; i < len; i++) {
ByteArray element = (ByteArray) parser.rdbGenericLoadStringObject(RDB_LOAD_NONE);
RedisInputStream stream = new RedisInputStream(new ByteArrayInputStream(element));
List<String> list = new ArrayList<>();
// Ziplist header reads must happen in this order to advance the stream.
int zlbytes = BaseRdbParser.LenHelper.zlbytes(stream);
int zltail = BaseRdbParser.LenHelper.zltail(stream);
int zllen = BaseRdbParser.LenHelper.zllen(stream);
for (int j = 0; j < zllen; j++) {
list.add(BaseRdbParser.StringHelper.zipListEntry(stream));
}
// Sanity-check the 0xFF ziplist terminator.
int zlend = BaseRdbParser.LenHelper.zlend(stream);
if (zlend != 255) {
throw new AssertionError("zlend expect 255 but " + zlend);
}
byteList.addAll(list);
}
o14.setValueRdbType(RDB_TYPE_LIST_QUICKLIST);
o14.setValue(byteList);
o14.setDb(db);
o14.setKey(key);
return o14;
}
#location 19
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Opens the replicator, delegating the actual work to doOpen() and
 * guaranteeing via finally that close() runs even when doOpen() throws.
 */
@Override
public void open() throws IOException {
try {
doOpen();
} finally {
close();
}
}
|
#vulnerable code
/**
 * Connects to the master and drives the PSYNC replication loop, retrying
 * on I/O errors up to the configured retry count (or forever when the
 * count is <= 0). On a successful PSYNC it starts a heartbeat timer that
 * periodically sends REPLCONF ACK with the current offset, then parses
 * and dispatches incoming commands until the channel is closed.
 *
 * NOTE(review): this record is labeled THREAD_SAFETY_VIOLATION in the
 * dataset — the heartbeat Timer assignment races with close() running on
 * another thread; verify the synchronization around heartBeat.
 */
@Override
public void open() throws IOException {
for (int i = 0; i < configuration.getRetries() || configuration.getRetries() <= 0; i++) {
try {
connect();
if (configuration.getAuthPassword() != null) auth(configuration.getAuthPassword());
sendSlavePort();
sendSlaveIp();
sendSlaveCapa();
//reset retries
i = 0;
logger.info("PSYNC " + configuration.getMasterRunId() + " " + String.valueOf(configuration.getOffset()));
send("PSYNC".getBytes(), configuration.getMasterRunId().getBytes(), String.valueOf(configuration.getOffset()).getBytes());
final String reply = (String) reply();
SyncMode syncMode = trySync(reply);
//bug fix.
if (syncMode == SyncMode.PSYNC && connected.get()) {
//heart beat send REPLCONF ACK ${slave offset}
synchronized (this) {
heartBeat = new Timer("heart beat");
//bug fix. in this point closed by other thread. multi-thread issue
heartBeat.schedule(new TimerTask() {
@Override
public void run() {
try {
send("REPLCONF".getBytes(), "ACK".getBytes(), String.valueOf(configuration.getOffset()).getBytes());
} catch (IOException e) {
//NOP
}
}
}, configuration.getHeartBeatPeriod(), configuration.getHeartBeatPeriod());
logger.info("heart beat started.");
}
}
//sync command
while (connected.get()) {
// Track the replication offset as bytes are consumed.
Object obj = replyParser.parse(new OffsetHandler() {
@Override
public void handle(long len) {
configuration.addOffset(len);
}
});
//command
if (obj instanceof Object[]) {
if (configuration.isVerbose() && logger.isDebugEnabled())
logger.debug(Arrays.deepToString((Object[]) obj));
Object[] command = (Object[]) obj;
CommandName cmdName = CommandName.name((String) command[0]);
Object[] params = new Object[command.length - 1];
System.arraycopy(command, 1, params, 0, params.length);
final CommandParser<? extends Command> operations;
//if command do not register. ignore
if ((operations = commands.get(cmdName)) == null) continue;
//do command replyParser
Command parsedCommand = operations.parse(cmdName, params);
//submit event
this.submitEvent(parsedCommand);
} else {
if (logger.isInfoEnabled()) logger.info("Redis reply:" + obj);
}
}
//connected = false
break;
} catch (/*bug fix*/IOException e) {
//close socket manual
if (!connected.get()) {
break;
}
logger.error("socket error", e);
//connect refused
//connect timeout
//read timeout
//connect abort
//server disconnect connection EOFException
close();
//retry psync in next loop.
logger.info("reconnect to redis-server. retry times:" + (i + 1));
try {
Thread.sleep(configuration.getRetryTimeInterval());
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
}
}
doCloseListener();
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Parses the bundled dumpV6.rdb fixture and, on close, verifies both the
 * number of key/value pairs (132) and the RDB checksum reported at the
 * end of the full sync.
 */
@Test
public void testChecksumV6() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV6.rdb"), FileType.RDB,
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
final AtomicLong atomicChecksum = new AtomicLong(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
acc.incrementAndGet();
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
// Record only the first checksum seen.
atomicChecksum.compareAndSet(0, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testChecksumV6");
assertEquals(132, acc.get());
assertEquals(-3409494954737929802L, atomicChecksum.get());
}
});
redisReplicator.open();
}
|
#vulnerable code
/**
 * Parses the bundled dumpV6.rdb fixture and, on close, verifies both the
 * number of key/value pairs (132) and the RDB checksum.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * the replicator opened here appears never to be explicitly closed;
 * verify the open()/close() contract for stream-backed replicators.
 */
@Test
public void testChecksumV6() throws IOException, InterruptedException {
Replicator redisReplicator = new RedisReplicator(
RedisSocketReplicatorTest.class.getClassLoader().getResourceAsStream("dumpV6.rdb"),
Configuration.defaultSetting());
final AtomicInteger acc = new AtomicInteger(0);
final AtomicLong atomicChecksum = new AtomicLong(0);
redisReplicator.addRdbListener(new RdbListener.Adaptor() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
acc.incrementAndGet();
}
@Override
public void postFullSync(Replicator replicator, long checksum) {
super.postFullSync(replicator, checksum);
// Record only the first checksum seen.
atomicChecksum.compareAndSet(0, checksum);
}
});
redisReplicator.addCloseListener(new CloseListener() {
@Override
public void handle(Replicator replicator) {
System.out.println("close testChecksumV6");
assertEquals(132, acc.get());
assertEquals(-3409494954737929802L, atomicChecksum.get());
}
});
redisReplicator.open();
}
#location 28
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Example-style test: first parses a local dump.rdb, printing only keys
 * that start with "SESSION"; then connects to a live Redis instance and
 * prints every key/value pair and every replicated command.
 */
@Test
public void testSync() throws Exception {
RedisReplicator replicator = new RedisReplicator(new File("dump.rdb"));
// Filter: only keys with the "SESSION" prefix reach the listener.
replicator.addRdbFilter(new RdbFilter() {
@Override
public boolean accept(KeyValuePair<?> kv) {
return kv.getKey().startsWith("SESSION");
}
});
replicator.addRdbListener(new RdbListener() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
//socket
replicator = new RedisReplicator("127.0.0.1", 6379);
replicator.addRdbListener(new RdbListener() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
|
#vulnerable code
/**
 * Example-style test: first parses a classpath dump.rdb, printing only
 * keys that start with "SESSION"; then connects to a live Redis instance
 * and prints every key/value pair and replicated command.
 *
 * NOTE(review): this record is labeled RESOURCE_LEAK in the dataset —
 * presumably the classpath InputStream and/or the second replicator are
 * not reliably closed; verify cleanup on all paths.
 */
@Test
public void testSync() throws Exception {
RedisReplicator replicator = new RedisReplicator(RedisReplicatorTest.class.getClassLoader().getResourceAsStream("dump.rdb"));
// Filter: only keys with the "SESSION" prefix reach the listener.
replicator.addRdbFilter(new RdbFilter() {
@Override
public boolean accept(KeyValuePair<?> kv) {
return kv.getKey().startsWith("SESSION");
}
});
replicator.addRdbListener(new RdbListener() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.open();
replicator.close();
//socket
replicator = new RedisReplicator("127.0.0.1", 6379);
replicator.addRdbListener(new RdbListener() {
@Override
public void handle(Replicator replicator, KeyValuePair<?> kv) {
System.out.println(kv);
}
});
replicator.addCommandListener(new CommandListener() {
@Override
public void handle(Replicator replicator, Command command) {
System.out.println(command);
}
});
replicator.open();
}
#location 17
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Groups the files of the given state by identical hash and reports
 * every group of files sharing a hash as duplicates.
 *
 * @param state the state whose file list is scanned
 * @return the accumulated duplicate groups
 */
public DuplicateResult findDuplicates(State state)
{
DuplicateResult result = new DuplicateResult(parameters);
// Sort a copy by hash so files with identical hashes become adjacent.
List<FileState> sorted = new ArrayList<>(state.getFileStates());
sorted.sort(hashComparator);
List<FileState> group = new ArrayList<>();
FileHash prevHash = new FileHash(FileState.NO_HASH, FileState.NO_HASH, FileState.NO_HASH);
for (FileState current : sorted)
{
// A hash change closes the running group and starts a new one.
if (!prevHash.equals(current.getFileHash()))
{
result.addDuplicatedFiles(group);
group.clear();
}
prevHash = current.getFileHash();
group.add(current);
}
// Flush the trailing group.
result.addDuplicatedFiles(group);
return result;
}
|
#vulnerable code
/**
 * Groups the files of the given state by identical hash and reports each
 * group sharing a hash as duplicates.
 *
 * NOTE(review): this record is labeled NULL_DEREFERENCE in the dataset —
 * duplicatedFiles is not declared in this method, so it presumably is a
 * field that may be null when first cleared here; verify initialization.
 */
public DuplicateResult findDuplicates(State state)
{
DuplicateResult result = new DuplicateResult(parameters);
// Sort a copy by hash so files with identical hashes become adjacent.
List<FileState> fileStates = new ArrayList<>(state.getFileStates());
Collections.sort(fileStates, fullHashComparator);
FileHash previousHash = new FileHash(FileState.NO_HASH, FileState.NO_HASH, FileState.NO_HASH);
for (FileState fileState : fileStates)
{
// A hash change closes the running group and starts a new one.
if (!previousHash.equals(fileState.getFileHash()))
{
result.addDuplicatedFiles(duplicatedFiles);
duplicatedFiles.clear();
}
previousHash = fileState.getFileHash();
duplicatedFiles.add(fileState);
}
// Flush the trailing group.
result.addDuplicatedFiles(duplicatedFiles);
return result;
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Prints the comparison against the last committed state: a header with
 * the previous state's timestamp and comment, then (in verbose mode only)
 * one labeled line per difference category, and finally the counts.
 *
 * @return this, for call chaining
 */
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
// Non-verbose mode: counts only, no per-file detail.
if (!context.isVerbose())
{
displayCounts();
return this;
}
// Fixed-width label so the file names line up across categories.
String stateFormat = "%-17s ";
final String addedStr = String.format(stateFormat, "Added:");
displayDifferences(addedStr, added,
diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName()));
final String copiedStr = String.format(stateFormat, "Copied:");
displayDifferences(copiedStr, copied,
diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()));
final String duplicatedStr = String.format(stateFormat, "Duplicated:");
displayDifferences(duplicatedStr, duplicated,
diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String dateModifiedStr = String.format(stateFormat, "Date modified:");
displayDifferences(dateModifiedStr, dateModified,
diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String contentModifiedStr = String.format(stateFormat, "Content modified:");
displayDifferences(contentModifiedStr, contentModified,
diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String attrsModifiedStr = String.format(stateFormat, "Attrs. modified:");
displayDifferences(attrsModifiedStr, attributesModified,
diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String renamedStr = String.format(stateFormat, "Renamed:");
displayDifferences(renamedStr, renamed,
diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String deletedStr = String.format(stateFormat, "Deleted:");
displayDifferences(deletedStr, deleted,
diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName()));
final String corruptedStr = String.format(stateFormat, "Corrupted?:");
displayDifferences(corruptedStr, corrupted,
diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
|
#vulnerable code
/**
 * Prints the comparison against the last committed state: a header with
 * the previous state's timestamp and comment, then (in verbose mode only)
 * one labeled line per difference category, and finally the counts.
 *
 * NOTE(review): this record is labeled CHECKERS_PRINTF_ARGS in the
 * dataset — at least one printf below apparently has a format-specifier /
 * argument mismatch; verify each format string against its arguments.
 *
 * @return this, for call chaining
 */
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
// Non-verbose mode: counts only, no per-file detail.
if (!context.isVerbose())
{
displayCounts();
return this;
}
// Fixed-width label so the file names line up across categories.
String stateFormat = "%-17s ";
for (Difference diff : added)
{
System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName());
}
for (Difference diff : copied)
{
System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName());
}
for (Difference diff : duplicated)
{
System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : dateModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : contentModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : attributesModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Attrs. modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : renamed)
{
System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : deleted)
{
System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName());
}
for (Difference diff : corrupted)
{
System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
#location 38
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Recursively scans dirToScan, hashing files with a pool of FileHasher
 * workers fed through a bounded queue, and assembles the resulting State
 * (sorted file states, ignored files, aggregate statistics).
 *
 * @param comment   free-form comment stored in the generated state
 * @param rootDir   root directory the scan is relative to
 * @param dirToScan directory whose tree is scanned
 * @return the freshly built State
 */
public State generateState(String comment, Path rootDir, Path dirToScan) throws NoSuchAlgorithmException
{
this.rootDir = rootDir;
Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(context.getHashMode()), context.getThreadCount()));
if (hashProgress.isProgressDisplayed())
{
System.out.printf("(Hash progress legend for files grouped %d by %d: %s)%n", PROGRESS_DISPLAY_FILE_COUNT, PROGRESS_DISPLAY_FILE_COUNT, hashProgress.hashProgressLegend());
}
State state = new State();
state.setComment(comment);
state.setHashMode(context.getHashMode());
long start = System.currentTimeMillis();
hashProgress.progressOutputInit();
// Bounded queue applies back-pressure between scanner and hashers.
filesToHashQueue = new LinkedBlockingDeque<>(FILES_QUEUE_CAPACITY);
initializeFileHashers();
// The global .fimignore is loaded from the current working directory.
Path userDir = Paths.get(System.getProperty("user.dir"));
List<FileToIgnore> globalIgnore = fimIgnoreManager.loadFimIgnore(userDir);
scanFileTree(filesToHashQueue, dirToScan, globalIgnore);
// In case the FileHashers have not already been started
startFileHashers();
waitAllFilesToBeHashed();
// Merge per-worker results and statistics.
for (FileHasher hasher : hashers)
{
state.getFileStates().addAll(hasher.getFileStates());
totalFileContentLength += hasher.getTotalFileContentLength();
totalBytesHashed += hasher.getTotalBytesHashed();
}
Collections.sort(state.getFileStates(), fileNameComparator);
state.setIgnoredFiles(fimIgnoreManager.getIgnoredFiles());
hashProgress.progressOutputStop();
displayStatistics(start, state);
return state;
}
|
#vulnerable code
/**
 * Recursively scans dirToScan, hashing files with a pool of FileHasher
 * workers fed through a bounded queue, and assembles the resulting State.
 *
 * NOTE(review): this record is labeled THREAD_SAFETY_VIOLATION in the
 * dataset — the progress-output state managed by progressOutputInit /
 * progressOutputStop is presumably shared with hasher threads without
 * proper locking; verify the synchronization around those counters.
 */
public State generateState(String comment, Path rootDir, Path dirToScan) throws NoSuchAlgorithmException
{
this.rootDir = rootDir;
Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(context.getHashMode()), context.getThreadCount()));
if (displayHashLegend())
{
System.out.printf("(Hash progress legend for files grouped %d by %d: %s)%n", PROGRESS_DISPLAY_FILE_COUNT, PROGRESS_DISPLAY_FILE_COUNT, hashProgressLegend());
}
State state = new State();
state.setComment(comment);
state.setHashMode(context.getHashMode());
long start = System.currentTimeMillis();
progressOutputInit();
// Bounded queue applies back-pressure between scanner and hashers.
filesToHashQueue = new LinkedBlockingDeque<>(FILES_QUEUE_CAPACITY);
initializeFileHashers();
// The global .fimignore is loaded from the current working directory.
Path userDir = Paths.get(System.getProperty("user.dir"));
List<FileToIgnore> globalIgnore = fimIgnoreManager.loadFimIgnore(userDir);
scanFileTree(filesToHashQueue, dirToScan, globalIgnore);
// In case the FileHashers have not already been started
startFileHashers();
waitAllFilesToBeHashed();
// Merge per-worker results and statistics.
for (FileHasher hasher : hashers)
{
state.getFileStates().addAll(hasher.getFileStates());
totalFileContentLength += hasher.getTotalFileContentLength();
totalBytesHashed += hasher.getTotalBytesHashed();
}
Collections.sort(state.getFileStates(), fileNameComparator);
state.setIgnoredFiles(fimIgnoreManager.getIgnoredFiles());
progressOutputStop();
displayStatistics(start, state);
return state;
}
#location 41
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
/**
 * Resets the progress counters under progressLock so concurrent readers
 * observe a consistently zeroed state; the finally block guarantees the
 * lock is released even if an assignment were to fail.
 */
public void outputInit() {
progressLock.lock();
try {
summedFileLength = 0;
fileCount = 0;
} finally {
progressLock.unlock();
}
}
|
#vulnerable code
/**
 * Resets the progress counters.
 *
 * NOTE(review): this record is labeled THREAD_SAFETY_VIOLATION in the
 * dataset — the counters are written without holding any lock, so the
 * reset can race with concurrent updates; verify the locking convention
 * used by the other accessors of these fields.
 */
public void outputInit() {
summedFileLength = 0;
fileCount = 0;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
final String addedStr = String.format(stateFormat, "Added:");
displayDifferences(addedStr, added,
diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName()));
final String copiedStr = String.format(stateFormat, "Copied:");
displayDifferences(copiedStr, copied,
diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()));
final String duplicatedStr = String.format(stateFormat, "Duplicated:");
displayDifferences(duplicatedStr, duplicated,
diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String dateModifiedStr = String.format(stateFormat, "Date modified:");
displayDifferences(dateModifiedStr, dateModified,
diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String contentModifiedStr = String.format(stateFormat, "Content modified:");
displayDifferences(contentModifiedStr, contentModified,
diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String attrsModifiedStr = String.format(stateFormat, "Attrs. modified:");
displayDifferences(attrsModifiedStr, attributesModified,
diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String renamedStr = String.format(stateFormat, "Renamed:");
displayDifferences(renamedStr, renamed,
diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String deletedStr = String.format(stateFormat, "Deleted:");
displayDifferences(deletedStr, deleted,
diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName()));
final String corruptedStr = String.format(stateFormat, "Corrupted?:");
displayDifferences(corruptedStr, corrupted,
diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
|
#vulnerable code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
for (Difference diff : added)
{
System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName());
}
for (Difference diff : copied)
{
System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName());
}
for (Difference diff : duplicated)
{
System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : dateModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : contentModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : attributesModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Attrs. modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : renamed)
{
System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : deleted)
{
System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName());
}
for (Difference diff : corrupted)
{
System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
#location 28
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public State loadState(int stateNumber) throws IOException
{
File stateFile = getStateFile(stateNumber);
if (!stateFile.exists())
{
throw new IllegalStateException(String.format("Unable to load State file %d from directory %s", stateNumber, stateDir));
}
State state = State.loadFromGZipFile(stateFile);
adjustAccordingToHashMode(state);
return state;
}
|
#vulnerable code
public State loadState(int stateNumber) throws IOException
{
File stateFile = getStateFile(stateNumber);
if (!stateFile.exists())
{
throw new IllegalStateException(String.format("Unable to load State file %d from directory %s", stateNumber, stateDir));
}
State state = new State();
state.loadFromGZipFile(stateFile);
// Replace by 'no_hash' accurately to be able to compare the FileState entry
switch (parameters.getHashMode())
{
case DONT_HASH_FILES:
for (FileState fileState : state.getFileStates())
{
fileState.getFileHash().setFirstFourKiloHash(FileState.NO_HASH);
fileState.getFileHash().setFirstMegaHash(FileState.NO_HASH);
fileState.getFileHash().setFullHash(FileState.NO_HASH);
}
break;
case HASH_ONLY_FIRST_FOUR_KILO:
for (FileState fileState : state.getFileStates())
{
fileState.getFileHash().setFirstMegaHash(FileState.NO_HASH);
fileState.getFileHash().setFullHash(FileState.NO_HASH);
}
break;
case HASH_ONLY_FIRST_MEGA:
for (FileState fileState : state.getFileStates())
{
fileState.getFileHash().setFirstFourKiloHash(FileState.NO_HASH);
fileState.getFileHash().setFullHash(FileState.NO_HASH);
}
break;
case COMPUTE_ALL_HASH:
// Nothing to do
break;
}
return state;
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
final String addedStr = String.format(stateFormat, "Added:");
displayDifferences(addedStr, added,
diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName()));
final String copiedStr = String.format(stateFormat, "Copied:");
displayDifferences(copiedStr, copied,
diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()));
final String duplicatedStr = String.format(stateFormat, "Duplicated:");
displayDifferences(duplicatedStr, duplicated,
diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String dateModifiedStr = String.format(stateFormat, "Date modified:");
displayDifferences(dateModifiedStr, dateModified,
diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String contentModifiedStr = String.format(stateFormat, "Content modified:");
displayDifferences(contentModifiedStr, contentModified,
diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String attrsModifiedStr = String.format(stateFormat, "Attrs. modified:");
displayDifferences(attrsModifiedStr, attributesModified,
diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String renamedStr = String.format(stateFormat, "Renamed:");
displayDifferences(renamedStr, renamed,
diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String deletedStr = String.format(stateFormat, "Deleted:");
displayDifferences(deletedStr, deleted,
diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName()));
final String corruptedStr = String.format(stateFormat, "Corrupted?:");
displayDifferences(corruptedStr, corrupted,
diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
|
#vulnerable code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
for (Difference diff : added)
{
System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName());
}
for (Difference diff : copied)
{
System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName());
}
for (Difference diff : duplicated)
{
System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : dateModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : contentModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : attributesModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Attrs. modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : renamed)
{
System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : deleted)
{
System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName());
}
for (Difference diff : corrupted)
{
System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
#location 53
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
final String addedStr = String.format(stateFormat, "Added:");
displayDifferences(addedStr, added,
diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName()));
final String copiedStr = String.format(stateFormat, "Copied:");
displayDifferences(copiedStr, copied,
diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()));
final String duplicatedStr = String.format(stateFormat, "Duplicated:");
displayDifferences(duplicatedStr, duplicated,
diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String dateModifiedStr = String.format(stateFormat, "Date modified:");
displayDifferences(dateModifiedStr, dateModified,
diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String contentModifiedStr = String.format(stateFormat, "Content modified:");
displayDifferences(contentModifiedStr, contentModified,
diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String attrsModifiedStr = String.format(stateFormat, "Attrs. modified:");
displayDifferences(attrsModifiedStr, attributesModified,
diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String renamedStr = String.format(stateFormat, "Renamed:");
displayDifferences(renamedStr, renamed,
diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String deletedStr = String.format(stateFormat, "Deleted:");
displayDifferences(deletedStr, deleted,
diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName()));
final String corruptedStr = String.format(stateFormat, "Corrupted?:");
displayDifferences(corruptedStr, corrupted,
diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
|
#vulnerable code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
for (Difference diff : added)
{
System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName());
}
for (Difference diff : copied)
{
System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName());
}
for (Difference diff : duplicated)
{
System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : dateModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : contentModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : attributesModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Attrs. modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : renamed)
{
System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : deleted)
{
System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName());
}
for (Difference diff : corrupted)
{
System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
#location 33
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public State generateState(String comment, File fimRepositoryRootDir) throws IOException, NoSuchAlgorithmException
{
Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(), parameters.getThreadCount()));
System.out.printf(" (Hash progress legend for files grouped %d by %d: %s)%n", PROGRESS_DISPLAY_FILE_COUNT, PROGRESS_DISPLAY_FILE_COUNT, hashProgressLegend());
State state = new State();
state.setComment(comment);
long start = System.currentTimeMillis();
progressOutputInit();
BlockingDeque<File> filesToHash = new LinkedBlockingDeque<>(1000);
List<FileHasher> hashers = new ArrayList<>();
executorService = Executors.newFixedThreadPool(parameters.getThreadCount());
for (int index = 0; index < parameters.getThreadCount(); index++)
{
FileHasher hasher = new FileHasher(this, filesToHash, fimRepositoryRootDir.toString());
executorService.submit(hasher);
hashers.add(hasher);
}
scanFileTree(filesToHash, fimRepositoryRootDir);
waitAllFileHashed();
for (FileHasher hasher : hashers)
{
state.getFileStates().addAll(hasher.getFileStates());
totalFileContentLength += hasher.getTotalFileContentLength();
totalBytesHashed += hasher.getTotalBytesHashed();
}
Collections.sort(state.getFileStates(), fileNameComparator);
progressOutputStop();
displayStatistics(start, state);
return state;
}
|
#vulnerable code
public State generateState(String comment, File fimRepositoryRootDir) throws IOException, NoSuchAlgorithmException
{
Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(), parameters.getThreadCount()));
System.out.printf(" (Hash progress legend: " + hashProgressLegend() + ")%n");
State state = new State();
state.setComment(comment);
long start = System.currentTimeMillis();
progressOutputInit();
BlockingDeque<File> filesToHash = new LinkedBlockingDeque<>(1000);
List<FileHasher> hashers = new ArrayList<>();
executorService = Executors.newFixedThreadPool(parameters.getThreadCount());
for (int index = 0; index < parameters.getThreadCount(); index++)
{
FileHasher hasher = new FileHasher(this, filesToHash, fimRepositoryRootDir.toString());
executorService.submit(hasher);
hashers.add(hasher);
}
scanFileTree(filesToHash, fimRepositoryRootDir);
waitAllFileHashed();
for (FileHasher hasher : hashers)
{
state.getFileStates().addAll(hasher.getFileStates());
totalFileContentLength += hasher.getTotalFileContentLength();
totalBytesHashed += hasher.getTotalBytesHashed();
}
Collections.sort(state.getFileStates(), fileNameComparator);
progressOutputStop();
displayStatistics(start, state);
return state;
}
#location 4
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
final String addedStr = String.format(stateFormat, "Added:");
displayDifferences(addedStr, added,
diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName()));
final String copiedStr = String.format(stateFormat, "Copied:");
displayDifferences(copiedStr, copied,
diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()));
final String duplicatedStr = String.format(stateFormat, "Duplicated:");
displayDifferences(duplicatedStr, duplicated,
diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String dateModifiedStr = String.format(stateFormat, "Date modified:");
displayDifferences(dateModifiedStr, dateModified,
diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String contentModifiedStr = String.format(stateFormat, "Content modified:");
displayDifferences(contentModifiedStr, contentModified,
diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String attrsModifiedStr = String.format(stateFormat, "Attrs. modified:");
displayDifferences(attrsModifiedStr, attributesModified,
diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
final String renamedStr = String.format(stateFormat, "Renamed:");
displayDifferences(renamedStr, renamed,
diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)));
final String deletedStr = String.format(stateFormat, "Deleted:");
displayDifferences(deletedStr, deleted,
diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName()));
final String corruptedStr = String.format(stateFormat, "Corrupted?:");
displayDifferences(corruptedStr, corrupted,
diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)));
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
|
#vulnerable code
public CompareResult displayChanges()
{
if (lastState != null)
{
System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp()));
if (lastState.getComment().length() > 0)
{
System.out.println("Comment: " + lastState.getComment());
}
Console.newLine();
}
if (!context.isVerbose())
{
displayCounts();
return this;
}
String stateFormat = "%-17s ";
for (Difference diff : added)
{
System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName());
}
for (Difference diff : copied)
{
System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName());
}
for (Difference diff : duplicated)
{
System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : dateModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : contentModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : attributesModified)
{
System.out.printf(stateFormat + "%s \t%s%n", "Attrs. modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
for (Difference diff : renamed)
{
System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true));
}
for (Difference diff : deleted)
{
System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName());
}
for (Difference diff : corrupted)
{
System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false));
}
if (somethingModified())
{
Console.newLine();
}
displayCounts();
return this;
}
#location 43
#vulnerability type CHECKERS_PRINTF_ARGS
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void outputInit() {
progressLock.lock();
try {
summedFileLength = 0;
fileCount = 0;
} finally {
progressLock.unlock();
}
}
|
#vulnerable code
public void outputInit() {
summedFileLength = 0;
fileCount = 0;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException
{
String[] filteredArgs = filterEmptyArgs(args);
if (filteredArgs.length < 1)
{
youMustSpecifyACommandToRun();
}
Command command = Command.fromName(filteredArgs[0]);
if (command == null)
{
youMustSpecifyACommandToRun();
}
CommandLineParser cmdLineGnuParser = new GnuParser();
Options options = constructOptions();
CommandLine commandLine;
boolean verbose = true;
CompareMode compareMode = CompareMode.FULL;
String message = "";
boolean useLastState = false;
int threadCount = Runtime.getRuntime().availableProcessors();
try
{
String[] actionArgs = Arrays.copyOfRange(filteredArgs, 1, filteredArgs.length);
commandLine = cmdLineGnuParser.parse(options, actionArgs);
if (commandLine.hasOption("h"))
{
printUsage();
System.exit(0);
}
else
{
verbose = !commandLine.hasOption('q');
compareMode = commandLine.hasOption('f') ? CompareMode.FAST : CompareMode.FULL;
message = commandLine.getOptionValue('m', message);
threadCount = Integer.parseInt(commandLine.getOptionValue('t', "" + threadCount));
useLastState = commandLine.hasOption('l');
}
}
catch (Exception ex)
{
printUsage();
System.exit(-1);
}
if (compareMode == CompareMode.FAST)
{
threadCount = 1;
System.out.println("Using fast compare mode. Thread count forced to 1");
}
if (threadCount < 1)
{
System.out.println("Thread count must be at least one");
System.exit(0);
}
File baseDirectory = new File(".");
File stateDir = new File(StateGenerator.FIC_DIR, "states");
if (command == Command.INIT)
{
if (stateDir.exists())
{
System.out.println("fim repository already exist");
System.exit(0);
}
}
else
{
if (!stateDir.exists())
{
System.out.println("fim repository does not exist. Please run 'fim init' before.");
System.exit(-1);
}
}
State lastState;
State currentState;
StateGenerator generator = new StateGenerator(threadCount, compareMode);
StateManager manager = new StateManager(stateDir, compareMode);
StateComparator comparator = new StateComparator(compareMode);
DuplicateFinder finder = new DuplicateFinder();
switch (command)
{
case INIT:
fastCompareNotSupported(compareMode);
stateDir.mkdirs();
currentState = generator.generateState("Initial State", baseDirectory);
comparator.compare(null, currentState).displayChanges(verbose);
manager.createNewState(currentState);
break;
case COMMIT:
fastCompareNotSupported(compareMode);
lastState = manager.loadLastState();
currentState = generator.generateState(message, baseDirectory);
CompareResult result = comparator.compare(lastState, currentState).displayChanges(verbose);
if (result.somethingModified())
{
System.out.println("");
if (confirmCommand("commit"))
{
manager.createNewState(currentState);
}
else
{
System.out.println("Nothing committed");
}
}
break;
case DIFF:
lastState = manager.loadLastState();
currentState = generator.generateState(message, baseDirectory);
comparator.compare(lastState, currentState).displayChanges(verbose);
break;
case FIND_DUPLICATES:
fastCompareNotSupported(compareMode);
System.out.println("Searching for duplicated files" + (useLastState ? " from the last committed State" : ""));
System.out.println("");
State state;
if (useLastState)
{
state = manager.loadLastState();
}
else
{
state = generator.generateState(message, baseDirectory);
}
finder.findDuplicates(state).displayDuplicates(verbose);
break;
case RESET_DATES:
fastCompareNotSupported(compareMode);
lastState = manager.loadLastState();
manager.resetDates(lastState);
break;
case LOG:
manager.displayStatesLog();
break;
}
}
|
#vulnerable code
public static void main(String[] args) throws IOException
{
String[] filteredArgs = filterEmptyArgs(args);
if (filteredArgs.length < 1)
{
youMustSpecifyACommandToRun();
}
Command command = Command.fromName(filteredArgs[0]);
if (command == null)
{
youMustSpecifyACommandToRun();
}
CommandLineParser cmdLineGnuParser = new GnuParser();
Options options = constructOptions();
CommandLine commandLine;
boolean verbose = true;
CompareMode compareMode = CompareMode.FULL;
String message = "";
boolean useLastState = false;
int threadCount = Runtime.getRuntime().availableProcessors();
try
{
String[] actionArgs = Arrays.copyOfRange(filteredArgs, 1, filteredArgs.length);
commandLine = cmdLineGnuParser.parse(options, actionArgs);
if (commandLine.hasOption("h"))
{
printUsage();
System.exit(0);
}
else
{
verbose = !commandLine.hasOption('q');
compareMode = commandLine.hasOption('f') ? CompareMode.FAST : CompareMode.FULL;
message = commandLine.getOptionValue('m', message);
threadCount = Integer.parseInt(commandLine.getOptionValue('t', "" + threadCount));
useLastState = commandLine.hasOption('l');
}
}
catch (Exception ex)
{
printUsage();
System.exit(-1);
}
if (compareMode == CompareMode.FAST)
{
threadCount = 1;
System.out.println("Using fast compare mode. Thread count forced to 1");
}
if (threadCount < 1)
{
System.out.println("Thread count must be at least one");
System.exit(0);
}
File baseDirectory = new File(".");
File stateDir = new File(StateGenerator.FIC_DIR, "states");
if (command == Command.INIT)
{
if (stateDir.exists())
{
System.out.println("fim repository already exist");
System.exit(0);
}
}
else
{
if (!stateDir.exists())
{
System.out.println("fim repository does not exist. Please run 'fim init' before.");
System.exit(-1);
}
}
State previousState;
State currentState;
StateGenerator generator = new StateGenerator(threadCount, compareMode);
StateManager manager = new StateManager(stateDir, compareMode);
StateComparator comparator = new StateComparator(compareMode);
DuplicateFinder finder = new DuplicateFinder();
switch (command)
{
case INIT:
fastCompareNotSupported(compareMode);
stateDir.mkdirs();
currentState = generator.generateState("Initial State", baseDirectory);
comparator.compare(null, currentState).displayChanges(verbose);
manager.createNewState(currentState);
break;
case COMMIT:
fastCompareNotSupported(compareMode);
previousState = manager.loadPreviousState();
currentState = generator.generateState(message, baseDirectory);
CompareResult result = comparator.compare(previousState, currentState).displayChanges(verbose);
if (result.somethingModified())
{
System.out.println("");
if (confirmCommand("commit"))
{
manager.createNewState(currentState);
}
else
{
System.out.println("Nothing committed");
}
}
break;
case DIFF:
previousState = manager.loadPreviousState();
currentState = generator.generateState(message, baseDirectory);
comparator.compare(previousState, currentState).displayChanges(verbose);
break;
case FIND_DUPLICATES:
fastCompareNotSupported(compareMode);
System.out.println("Searching for duplicated files" + (useLastState ? " from the last committed State" : ""));
System.out.println("");
State state;
if (useLastState)
{
state = manager.loadPreviousState();
}
else
{
state = generator.generateState(message, baseDirectory);
}
finder.findDuplicates(state).displayDuplicates(verbose);
break;
case RESET_DATES:
fastCompareNotSupported(compareMode);
previousState = manager.loadPreviousState();
manager.resetDates(previousState);
break;
case LOG:
manager.displayStatesLog();
break;
}
}
#location 124
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int getNumberOfFeatures(){
List<? extends HasArray> coefs = getCoefs();
return NeuralNetworkUtil.getNumberOfFeatures(coefs);
}
|
#vulnerable code
@Override
public int getNumberOfFeatures(){
List<?> coefs = getCoefs();
NDArray input = (NDArray)coefs.get(0);
int[] shape = NDArrayUtil.getShape(input);
return shape[0];
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private List<? extends Number> getNodeAttribute(String key){
List<? extends Number> nodeAttributes = (List<? extends Number>)ClassDictUtil.getArray(this, "nodes", key);
return nodeAttributes;
}
|
#vulnerable code
private List<? extends Number> getNodeAttribute(String key){
NDArrayWrapper nodes = (NDArrayWrapper)get("nodes");
Map<String, ?> content = (Map<String, ?>)nodes.getContent();
return (List<? extends Number>)content.get(key);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Expression encode(int index, FieldName name){
Expression expression = new FieldRef(name);
if(getWithMean()){
Number mean = Iterables.get(getMean(), index);
if(Double.compare(mean.doubleValue(), 0d) != 0){
expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean));
}
} // End if
if(gwtWithStd()){
Number std = Iterables.get(getStd(), index);
if(Double.compare(std.doubleValue(), 1d) != 0){
expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std));
}
}
// "($name - mean) / std"
return expression;
}
|
#vulnerable code
@Override
public Expression encode(int index, FieldName name){
Expression expression = new FieldRef(name);
if(withMean()){
Number mean = Iterables.get(getMean(), index);
if(Double.compare(mean.doubleValue(), 0d) != 0){
expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean));
}
} // End if
if(withStd()){
Number std = Iterables.get(getStd(), index);
if(Double.compare(std.doubleValue(), 1d) != 0){
expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std));
}
}
// "($name - mean) / std"
return expression;
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected Object[] getEstimatorStep(){
List<Object[]> steps = getSteps();
if(steps == null || steps.size() < 1){
throw new IllegalArgumentException("Missing estimator step");
}
return steps.get(steps.size() - 1);
}
|
#vulnerable code
protected Object[] getEstimatorStep(){
List<Object[]> steps = getSteps();
if(steps.size() < 1){
throw new IllegalArgumentException("Missing estimator step");
}
return steps.get(steps.size() - 1);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public List<Object[]> getTransformerSteps(){
List<Object[]> steps = getSteps();
if(steps == null || steps.size() < 1){
throw new IllegalArgumentException("Missing estimator step");
}
return steps.subList(0, steps.size() - 1);
}
|
#vulnerable code
public List<Object[]> getTransformerSteps(){
List<Object[]> steps = getSteps();
if(steps.size() < 1){
throw new IllegalArgumentException("Missing estimator step");
}
return steps.subList(0, steps.size() - 1);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int getNumberOfFeatures(){
int[] shape = getCoefShape();
if(shape.length != 1){
throw new IllegalArgumentException();
}
return shape[0];
}
|
#vulnerable code
@Override
public int getNumberOfFeatures(){
return (Integer)get("rank_");
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
Expression encodeUFunc(UFunc ufunc, FieldRef fieldRef){
String name = ufunc.getName();
switch(name){
case "absolute":
return PMMLUtil.createApply("abs", fieldRef);
case "ceil":
case "exp":
case "floor":
return PMMLUtil.createApply(name, fieldRef);
case "log":
return PMMLUtil.createApply("ln", fieldRef);
case "log10":
return PMMLUtil.createApply(name, fieldRef);
case "negative":
return PMMLUtil.createApply("*", PMMLUtil.createConstant(-1), fieldRef);
case "reciprocal":
return PMMLUtil.createApply("/", PMMLUtil.createConstant(1), fieldRef);
case "rint":
return PMMLUtil.createApply("round", fieldRef);
case "sign":
return PMMLUtil.createApply("if", PMMLUtil.createApply("lessThan", fieldRef, PMMLUtil.createConstant(0)),
PMMLUtil.createConstant(-1), // x < 0
PMMLUtil.createApply("if", PMMLUtil.createApply("greaterThan", fieldRef, PMMLUtil.createConstant(0)),
PMMLUtil.createConstant(+1), // x > 0
PMMLUtil.createConstant(0) // x == 0
)
);
case "sqrt":
return PMMLUtil.createApply(name, fieldRef);
case "square":
return PMMLUtil.createApply("*", fieldRef, fieldRef);
default:
throw new IllegalArgumentException(name);
}
}
|
#vulnerable code
static
Expression encodeUFunc(UFunc ufunc, FieldRef fieldRef){
String module = ufunc.getModule();
String name = ufunc.getName();
switch(module){
case "numpy":
case "numpy.core.numeric":
case "numpy.lib.function_base":
break;
default:
throw new IllegalArgumentException(module);
}
switch(name){
case "absolute":
return PMMLUtil.createApply("abs", fieldRef);
case "ceil":
case "exp":
case "floor":
return PMMLUtil.createApply(name, fieldRef);
case "log":
return PMMLUtil.createApply("ln", fieldRef);
case "log10":
return PMMLUtil.createApply(name, fieldRef);
case "negative":
return PMMLUtil.createApply("*", PMMLUtil.createConstant(-1), fieldRef);
case "reciprocal":
return PMMLUtil.createApply("/", PMMLUtil.createConstant(1), fieldRef);
case "rint":
return PMMLUtil.createApply("round", fieldRef);
case "sign":
return PMMLUtil.createApply("if", PMMLUtil.createApply("lessThan", fieldRef, PMMLUtil.createConstant(0)),
PMMLUtil.createConstant(-1), // x < 0
PMMLUtil.createApply("if", PMMLUtil.createApply("greaterThan", fieldRef, PMMLUtil.createConstant(0)),
PMMLUtil.createConstant(+1), // x > 0
PMMLUtil.createConstant(0) // x == 0
)
);
case "sqrt":
return PMMLUtil.createApply(name, fieldRef);
case "square":
return PMMLUtil.createApply("*", fieldRef, fieldRef);
default:
throw new IllegalArgumentException(name);
}
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
private InputStream init(PushbackInputStream is) throws IOException {
byte[] magic = new byte[2];
ByteStreams.readFully(is, magic);
is.unread(magic);
// Joblib 0.10.0 and newer
if(magic[0] == 'x'){
return initZlib(is);
} else
// Joblib 0.9.4 and earlier
if(magic[0] == 'Z' && magic[1] == 'F'){
return initCompat(is);
} else
{
throw new IOException();
}
}
|
#vulnerable code
static
private InputStream init(PushbackInputStream is) throws IOException {
byte[] headerBytes = new byte[2 + 19];
ByteStreams.readFully(is, headerBytes);
String header = new String(headerBytes);
if(!header.startsWith("ZF0x")){
throw new IOException();
}
// Remove trailing whitespace
header = header.trim();
final
long expectedSize = Long.parseLong(header.substring(4), 16);
// Consume the first byte
int firstByte = is.read();
if(firstByte < 0){
return is;
} // End if
// If the first byte is not a space character, then make it available for reading again
if(firstByte != '\u0020'){
is.unread(firstByte);
}
InflaterInputStream zlibIs = new InflaterInputStream(is);
InputStream result = new FilterInputStream(new CountingInputStream(zlibIs)){
private boolean closed = false;
@Override
public void close() throws IOException {
if(this.closed){
return;
}
this.closed = true;
long size = ((CountingInputStream)super.in).getCount();
super.close();
if(size != expectedSize){
throw new IOException("Expected " + expectedSize + " bytes of uncompressed data, got " + size + " bytes");
}
}
};
return result;
}
#location 32
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int getNumberOfFeatures(){
return ValueUtil.asInteger((Number)get("n_features"));
}
|
#vulnerable code
@Override
public int getNumberOfFeatures(){
return (Integer)get("n_features");
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private Object loadContent(){
Object[] shape = getShape();
Object descr = getDescr();
byte[] data = (byte[])getData();
if(descr instanceof DType){
DType dType = (DType)descr;
descr = dType.toDescr();
}
try {
InputStream is = new ByteArrayInputStream(data);
try {
return NDArrayUtil.parseData(is, descr, shape);
} finally {
is.close();
}
} catch(IOException ioe){
throw new RuntimeException(ioe);
}
}
|
#vulnerable code
private Object loadContent(){
Object[] shape = getShape();
Object descr = getDescr();
byte[] data = (byte[])getData();
try {
InputStream is = new ByteArrayInputStream(data);
try {
return NDArrayUtil.parseData(is, descr, shape);
} finally {
is.close();
}
} catch(IOException ioe){
throw new RuntimeException(ioe);
}
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){
List<?> data = getData();
ClassDictUtil.checkSize(1, ids, features);
final
InvalidValueTreatmentMethod invalidValueTreatment = DomainUtil.parseInvalidValueTreatment(getInvalidValueTreatment());
WildcardFeature wildcardFeature = (WildcardFeature)features.get(0);
Function<Object, String> function = new Function<Object, String>(){
@Override
public String apply(Object object){
return ValueUtil.formatValue(object);
}
};
List<String> categories = Lists.transform(data, function);
FieldDecorator decorator = new ValidValueDecorator(){
{
setInvalidValueTreatment(invalidValueTreatment);
}
};
CategoricalFeature categoricalFeature = wildcardFeature.toCategoricalFeature(categories);
encoder.addDecorator(categoricalFeature.getName(), decorator);
return Collections.<Feature>singletonList(categoricalFeature);
}
|
#vulnerable code
@Override
public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){
List<?> data = getData();
if(ids.size() != 1 || features.size() != 1){
throw new IllegalArgumentException();
}
final
InvalidValueTreatmentMethod invalidValueTreatment = DomainUtil.parseInvalidValueTreatment(getInvalidValueTreatment());
WildcardFeature wildcardFeature = (WildcardFeature)features.get(0);
Function<Object, String> function = new Function<Object, String>(){
@Override
public String apply(Object object){
return ValueUtil.formatValue(object);
}
};
List<String> categories = Lists.transform(data, function);
FieldDecorator decorator = new ValidValueDecorator(){
{
setInvalidValueTreatment(invalidValueTreatment);
}
};
CategoricalFeature categoricalFeature = wildcardFeature.toCategoricalFeature(categories);
encoder.addDecorator(categoricalFeature.getName(), decorator);
return Collections.<Feature>singletonList(categoricalFeature);
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int getNumberOfFeatures(){
List<? extends HasArray> coefs = getCoefs();
return NeuralNetworkUtil.getNumberOfFeatures(coefs);
}
|
#vulnerable code
@Override
public int getNumberOfFeatures(){
List<?> coefs = getCoefs();
NDArray input = (NDArray)coefs.get(0);
int[] shape = NDArrayUtil.getShape(input);
return shape[0];
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public ContinuousOutputFeature toContinuousFeature(){
PMMLEncoder encoder = ensureEncoder();
Output output = getOutput();
OutputField outputField = getField();
DataType dataType = outputField.getDataType();
switch(dataType){
case INTEGER:
case FLOAT:
case DOUBLE:
break;
default:
throw new IllegalArgumentException();
}
outputField.setOpType(OpType.CONTINUOUS);
return new ContinuousOutputFeature(encoder, output, outputField);
}
|
#vulnerable code
@Override
public ContinuousOutputFeature toContinuousFeature(){
PMMLEncoder encoder = ensureEncoder();
Output output = getOutput();
OutputField outputField = OutputUtil.getOutputField(output, getName());
DataType dataType = outputField.getDataType();
switch(dataType){
case INTEGER:
case FLOAT:
case DOUBLE:
break;
default:
throw new IllegalArgumentException();
}
outputField.setOpType(OpType.CONTINUOUS);
return new ContinuousOutputFeature(encoder, output, outputField);
}
#location 9
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public int getNumberOfFeatures(){
return ValueUtil.asInteger((Number)get("n_features"));
}
|
#vulnerable code
@Override
public int getNumberOfFeatures(){
return (Integer)get("n_features");
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private List<?> loadContent(){
DType dtype = getDType();
byte[] obj = getObj();
try {
InputStream is = new ByteArrayInputStream(obj);
try {
return (List<?>)NDArrayUtil.parseData(is, dtype, new Object[0]);
} finally {
is.close();
}
} catch(IOException ioe){
throw new RuntimeException(ioe);
}
}
|
#vulnerable code
private List<?> loadContent(){
DType dtype = getDType();
byte[] obj = getObj();
try {
InputStream is = new ByteArrayInputStream(obj);
try {
return (List<?>)NDArrayUtil.parseData(is, dtype.toDescr(), new Object[0]);
} finally {
is.close();
}
} catch(IOException ioe){
throw new RuntimeException(ioe);
}
}
#location 9
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){
int[] shape = getComponentsShape();
int numberOfComponents = shape[0];
int numberOfFeatures = shape[1];
List<? extends Number> components = getComponents();
List<? extends Number> mean = getMean();
ClassDictUtil.checkSize(numberOfFeatures, ids, features, mean);
Boolean whiten = getWhiten();
List<? extends Number> explainedVariance = (whiten ? getExplainedVariance() : null);
ClassDictUtil.checkSize(numberOfComponents, explainedVariance);
String id = String.valueOf(PCA.SEQUENCE.getAndIncrement());
ids.clear();
List<Feature> result = new ArrayList<>();
for(int i = 0; i < numberOfComponents; i++){
List<? extends Number> component = MatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i);
Apply apply = new Apply("sum");
for(int j = 0; j < numberOfFeatures; j++){
Feature feature = features.get(j);
// "($name[i] - mean[i]) * component[i]"
Expression expression = (feature.toContinuousFeature()).ref();
Number meanValue = mean.get(j);
if(!ValueUtil.isZero(meanValue)){
expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(meanValue));
}
Number componentValue = component.get(j);
if(!ValueUtil.isOne(componentValue)){
expression = PMMLUtil.createApply("*", expression, PMMLUtil.createConstant(componentValue));
}
apply.addExpressions(expression);
}
if(whiten){
Number explainedVarianceValue = explainedVariance.get(i);
if(!ValueUtil.isOne(explainedVarianceValue)){
apply = PMMLUtil.createApply("/", apply, PMMLUtil.createConstant(Math.sqrt(ValueUtil.asDouble(explainedVarianceValue))));
}
}
DerivedField derivedField = encoder.createDerivedField(createName(id, i), apply);
ids.add((derivedField.getName()).getValue());
result.add(new ContinuousFeature(encoder, derivedField));
}
return result;
}
|
#vulnerable code
@Override
public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){
int[] shape = getComponentsShape();
int numberOfComponents = shape[0];
int numberOfFeatures = shape[1];
if(ids.size() != numberOfFeatures || features.size() != numberOfFeatures){
throw new IllegalArgumentException();
}
String id = String.valueOf(PCA.SEQUENCE.getAndIncrement());
List<? extends Number> components = getComponents();
List<? extends Number> mean = getMean();
Boolean whiten = getWhiten();
List<? extends Number> explainedVariance = (whiten ? getExplainedVariance() : null);
ids.clear();
List<Feature> result = new ArrayList<>();
for(int i = 0; i < numberOfComponents; i++){
List<? extends Number> component = MatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i);
Apply apply = new Apply("sum");
for(int j = 0; j < numberOfFeatures; j++){
Feature feature = features.get(j);
// "($name[i] - mean[i]) * component[i]"
Expression expression = (feature.toContinuousFeature()).ref();
Number meanValue = mean.get(j);
if(!ValueUtil.isZero(meanValue)){
expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(meanValue));
}
Number componentValue = component.get(j);
if(!ValueUtil.isOne(componentValue)){
expression = PMMLUtil.createApply("*", expression, PMMLUtil.createConstant(componentValue));
}
apply.addExpressions(expression);
}
if(whiten){
Number explainedVarianceValue = explainedVariance.get(i);
if(!ValueUtil.isOne(explainedVarianceValue)){
apply = PMMLUtil.createApply("/", apply, PMMLUtil.createConstant(Math.sqrt(ValueUtil.asDouble(explainedVarianceValue))));
}
}
DerivedField derivedField = encoder.createDerivedField(createName(id, i), apply);
ids.add((derivedField.getName()).getValue());
result.add(new ContinuousFeature(encoder, derivedField));
}
return result;
}
#location 19
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){
String function = getFunction();
if(features.size() <= 1){
return features;
}
Apply apply = new Apply(translateFunction(function));
for(Feature feature : features){
apply.addExpressions(feature.ref());
}
FieldName name = FieldName.create(function + "(" + FeatureUtil.formatFeatureList(features) + ")"); // XXX
DerivedField derivedField = encoder.createDerivedField(name, OpType.CONTINUOUS, DataType.DOUBLE, apply);
return Collections.<Feature>singletonList(new ContinuousFeature(encoder, derivedField));
}
|
#vulnerable code
@Override
public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){
String function = translateFunction(getFunction());
if(features.size() <= 1){
return features;
}
FieldName name = FieldName.create(function + "(" + FeatureUtil.formatFeatureList(features) + ")");
Apply apply = new Apply(function);
for(Feature feature : features){
apply.addExpressions(feature.ref());
}
DerivedField derivedField = encoder.createDerivedField(name, OpType.CONTINUOUS, DataType.DOUBLE, apply);
return Collections.<Feature>singletonList(new ContinuousFeature(encoder, derivedField));
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
public List<?> getArray(ClassDict dict, String name, String key){
Object object = dict.get(name);
if(object instanceof NDArrayWrapper){
NDArrayWrapper arrayWrapper = (NDArrayWrapper)object;
object = arrayWrapper.getContent();
} // End if
if(object instanceof NDArray){
NDArray array = (NDArray)object;
return NDArrayUtil.getContent(array, key);
}
throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type");
}
|
#vulnerable code
static
public List<?> getArray(ClassDict dict, String name, String key){
Object object = unwrap(dict.get(name));
if(object instanceof NDArray){
NDArray array = (NDArray)object;
return NDArrayUtil.getContent(array, key);
}
throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type");
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public int getNumberOfFeatures(){
return ValueUtil.asInteger((Number)get("n_features_"));
}
|
#vulnerable code
public int getNumberOfFeatures(){
return (Integer)get("n_features_");
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public List<Transformer> getTransformers(){
List<Object[]> steps = getSteps();
return TransformerUtil.asTransformerList(TupleUtil.extractElementList(steps, 1));
}
|
#vulnerable code
public List<Transformer> getTransformers(){
List<Object[]> steps = getSteps();
boolean flexible = isFlexible();
if(flexible && steps.size() > 0){
Estimator estimator = getEstimator();
if(estimator != null){
steps = steps.subList(0, steps.size() - 1);
}
}
return TransformerUtil.asTransformerList(TupleUtil.extractElementList(steps, 1));
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
public List<?> getArray(ClassDict dict, String name){
Object object = dict.get(name);
if(object instanceof HasArray){
HasArray hasArray = (HasArray)object;
return hasArray.getArrayContent();
} // End if
if(object instanceof Number){
return Collections.singletonList(object);
}
throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type");
}
|
#vulnerable code
static
public List<?> getArray(ClassDict dict, String name){
Object object = unwrap(dict.get(name));
if(object instanceof NDArray){
NDArray array = (NDArray)object;
return NDArrayUtil.getContent(array);
} else
if(object instanceof CSRMatrix){
CSRMatrix matrix = (CSRMatrix)object;
return CSRMatrixUtil.getContent(matrix);
} else
if(object instanceof Scalar){
Scalar scalar = (Scalar)object;
return scalar.getContent();
} // End if
if(object instanceof Number){
return Collections.singletonList(object);
}
throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type");
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public List<?> getClasses(){
LabelEncoder labelEncoder = getLabelEncoder();
return labelEncoder.getClasses();
}
|
#vulnerable code
@Override
public List<?> getClasses(){
List<Object> result = new ArrayList<>();
List<?> values = (List<?>)get("classes_");
for(Object value : values){
if(value instanceof HasArray){
HasArray hasArray = (HasArray)value;
result.addAll(hasArray.getArrayContent());
} else
{
result.add(value);
}
}
return result;
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public double[] getValues(){
List<? extends Number> values = (List<? extends Number>)ClassDictUtil.getArray(this, "values");
return Doubles.toArray(values);
}
|
#vulnerable code
public double[] getValues(){
NDArrayWrapper values = (NDArrayWrapper)get("values");
return Doubles.toArray((List<? extends Number>)values.getContent());
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
public MiningModel encodeBooster(HasBooster hasBooster, Schema schema){
Booster booster = hasBooster.getBooster();
Learner learner = booster.getLearner();
Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);
// XXX
List<Feature> features = xgbSchema.getFeatures();
for(Feature feature : features){
if(feature instanceof ContinuousFeature){
SkLearnEncoder encoder = (SkLearnEncoder)feature.getEncoder();
TypeDefinitionField field = encoder.getField(feature.getName());
if(!(OpType.CONTINUOUS).equals(field.getOpType())){
field.setOpType(OpType.CONTINUOUS);
}
}
}
MiningModel miningModel = learner.encodeMiningModel(xgbSchema);
return miningModel;
}
|
#vulnerable code
static
public MiningModel encodeBooster(HasBooster hasBooster, Schema schema){
Booster booster = hasBooster.getBooster();
Learner learner = booster.getLearner();
Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);
MiningModel miningModel = learner.encodeMiningModel(xgbSchema);
return miningModel;
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public ServiceMessage decodeData(ServiceMessage message, Class type) {
if (message.data() != null && message.data() instanceof ByteBuf) {
try (ByteBufInputStream inputStream = new ByteBufInputStream(message.data(), true)) {
return ServiceMessage.from(message).data(readFrom(inputStream, type)).build();
} catch (Throwable ex) {
LOGGER.error("Failed to deserialize data", ex);
}
}
return message;
}
|
#vulnerable code
@Override
public ServiceMessage decodeData(ServiceMessage message, Class type) {
if (message.data() != null && message.data() instanceof ByteBuf) {
ByteBufInputStream inputStream = new ByteBufInputStream(message.data());
try {
return ServiceMessage.from(message).data(readFrom(inputStream, type)).build();
} catch (Throwable ex) {
LOGGER.error("Failed to deserialize data", ex);
}
}
return message;
}
#location 6
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void importRelationshipIndexes(File file, String indexName, String indexType) throws IOException {
BatchInserterIndex index;
if (indexType.equals("fulltext")) {
index = lucene.relationshipIndex( indexName, FULLTEXT_CONFIG );
} else {
index = lucene.relationshipIndex( indexName, EXACT_CONFIG );
}
BufferedReader bf = new BufferedReader(new FileReader(file));
final Data data = new Data(bf.readLine(), "\t", 1);
Object[] rel = new Object[1];
String line;
report.reset();
while ((line = bf.readLine()) != null) {
final Map<String, Object> properties = data.update(line, rel);
index.add(id(rel[0]), properties);
report.dots();
}
report.finishImport("Relationships into " + indexName + " Index");
}
|
#vulnerable code
private void importRelationshipIndexes(File file, String indexName, String indexType) throws IOException {
BatchInserterIndex index;
if (indexType.equals("fulltext")) {
index = lucene.relationshipIndex( indexName, stringMap( "type", "fulltext" ) );
} else {
index = lucene.relationshipIndex( indexName, EXACT_CONFIG );
}
BufferedReader bf = new BufferedReader(new FileReader(file));
final Data data = new Data(bf.readLine(), "\t", 1);
Object[] rel = new Object[1];
String line;
report.reset();
while ((line = bf.readLine()) != null) {
final Map<String, Object> properties = map(data.update(line, rel));
index.add(id(rel[0]), properties);
report.dots();
}
report.finishImport("Relationships into " + indexName + " Index");
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void importRelationships(File file) throws IOException {
BufferedReader bf = new BufferedReader(new FileReader(file));
final Data data = new Data(bf.readLine(), "\t", 3);
Object[] rel = new Object[3];
final RelType relType = new RelType();
String line;
report.reset();
while ((line = bf.readLine()) != null) {
final Map<String, Object> properties = data.update(line, rel);
db.createRelationship(id(rel[0]), id(rel[1]), relType.update(rel[2]), properties);
report.dots();
}
report.finishImport("Relationships");
}
|
#vulnerable code
private void importRelationships(File file) throws IOException {
BufferedReader bf = new BufferedReader(new FileReader(file));
final Data data = new Data(bf.readLine(), "\t", 3);
Object[] rel = new Object[3];
final Type type = new Type();
String line;
report.reset();
while ((line = bf.readLine()) != null) {
final Map<String, Object> properties = map(data.update(line, rel));
db.createRelationship(id(rel[0]), id(rel[1]), type.update(rel[2]), properties);
report.dots();
}
report.finishImport("Relationships");
}
#location 12
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void upZipFile(File zipFile, String folderPath) throws ZipException, IOException {
File desDir = new File(folderPath);
if (!desDir.exists()) {
if (!desDir.mkdirs()) {
System.out.println("was not successful.");
}
}
ZipFile zf = new ZipFile(zipFile);
for (Enumeration<?> entries = zf.entries(); entries.hasMoreElements(); ) {
ZipEntry entry = ((ZipEntry) entries.nextElement());
InputStream in = zf.getInputStream(entry);
String str = folderPath;
File desFile = new File(str, java.net.URLEncoder.encode(entry.getName(), "UTF-8"));
if (!desFile.exists()) {
File fileParentDir = desFile.getParentFile();
if (!fileParentDir.exists()) {
if (!fileParentDir.mkdirs()) {
System.out.println("was not successful.");
}
}
}
OutputStream out = new FileOutputStream(desFile);
byte[] buffer = new byte[1024 * 1024];
int realLength = in.read(buffer);
while (realLength != -1) {
out.write(buffer, 0, realLength);
realLength = in.read(buffer);
}
out.close();
in.close();
}
}
|
#vulnerable code
public static void upZipFile(File zipFile, String folderPath) throws ZipException, IOException {
File desDir = new File(folderPath);
if (!desDir.exists()) {
desDir.mkdirs();
}
ZipFile zf = new ZipFile(zipFile);
for (Enumeration<?> entries = zf.entries(); entries.hasMoreElements(); ) {
ZipEntry entry = ((ZipEntry) entries.nextElement());
InputStream in = zf.getInputStream(entry);
String str = folderPath;
File desFile = new File(str, java.net.URLEncoder.encode(entry.getName(), "UTF-8"));
if (!desFile.exists()) {
File fileParentDir = desFile.getParentFile();
if (!fileParentDir.exists()) {
fileParentDir.mkdirs();
}
}
OutputStream out = new FileOutputStream(desFile);
byte[] buffer = new byte[1024 * 1024];
int realLength = in.read(buffer);
while (realLength != -1) {
out.write(buffer, 0, realLength);
realLength = in.read(buffer);
}
out.close();
in.close();
}
}
#location 5
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test()
public void testSizeControl() throws IOException, InterruptedException, ExecutionException {
runSizeControl("scaling-avl.tsv", new AvlDigestFactory());
}
|
#vulnerable code
@Test()
public void testSizeControl() throws IOException, InterruptedException, ExecutionException {
// very slow running data generator. Don't want to run this normally. To run slow tests use
// mvn test -DrunSlowTests=true
assumeTrue(Boolean.parseBoolean(System.getProperty("runSlowTests")));
final Random gen0 = RandomUtils.getRandom();
final PrintWriter out = new PrintWriter(new FileOutputStream("scaling.tsv"));
out.printf("k\tsamples\tcompression\tsize1\tsize2\n");
List<Callable<String>> tasks = Lists.newArrayList();
for (int k = 0; k < 20; k++) {
for (final int size : new int[]{10, 100, 1000, 10000}) {
final int currentK = k;
tasks.add(new Callable<String>() {
Random gen = new Random(gen0.nextLong());
@Override
public String call() throws Exception {
System.out.printf("Starting %d,%d\n", currentK, size);
StringWriter s = new StringWriter();
PrintWriter out = new PrintWriter(s);
for (double compression : new double[]{2, 5, 10, 20, 50, 100, 200, 500, 1000}) {
AVLTreeDigest dist = new AVLTreeDigest(compression);
for (int i = 0; i < size * 1000; i++) {
dist.add(gen.nextDouble());
}
out.printf("%d\t%d\t%.0f\t%d\t%d\n", currentK, size, compression, dist.smallByteSize(), dist.byteSize());
out.flush();
}
out.close();
return s.toString();
}
});
}
}
for (Future<String> result : Executors.newFixedThreadPool(20).invokeAll(tasks)) {
out.write(result.get());
}
out.close();
}
#location 27
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static void whitelistVerify(
final String remoteHost,
final WhitelistItem whitelistItem,
final Map<String, List<String>> headers,
final String postContent)
throws WhitelistException {
WhitelistHost whitelistHost = new WhitelistHost(whitelistItem.getHost());
if (HostVerifier.whitelistVerified(new WhitelistHost(remoteHost), whitelistHost)) {
if (whitelistItem.isHmacEnabled()) {
final Optional<StringCredentials> hmacKeyOpt =
CredentialsHelper.findCredentials(whitelistItem.getHmacCredentialId());
if (!hmacKeyOpt.isPresent()) {
throw new WhitelistException(
"Was unable to find secret text credential " + whitelistItem.getHmacCredentialId());
}
final String hmacHeader = whitelistItem.getHmacHeader();
final String hmacKey = hmacKeyOpt.get().getSecret().getPlainText();
final String hmacAlgorithm = whitelistItem.getHmacAlgorithm();
hmacVerify(headers, postContent, hmacHeader, hmacKey, hmacAlgorithm);
return;
}
return;
}
throw new WhitelistException(
"Sending host \"" + remoteHost + "\" was not matched by whitelist.");
}
|
#vulnerable code
static void whitelistVerify(
final String remoteHost,
final WhitelistItem whitelistItem,
final Map<String, List<String>> headers,
final String postContent)
throws WhitelistException {
String whitelistHost = whitelistItem.getHost();
if (HostVerifier.whitelistContains(remoteHost, whitelistHost)) {
if (whitelistItem.isHmacEnabled()) {
final Optional<StringCredentials> hmacKeyOpt =
CredentialsHelper.findCredentials(whitelistItem.getHmacCredentialId());
if (!hmacKeyOpt.isPresent()) {
throw new WhitelistException(
"Was unable to find secret text credential " + whitelistItem.getHmacCredentialId());
}
final String hmacHeader = whitelistItem.getHmacHeader();
final String hmacKey = hmacKeyOpt.get().getSecret().getPlainText();
final String hmacAlgorithm = whitelistItem.getHmacAlgorithm();
hmacVerify(headers, postContent, hmacHeader, hmacKey, hmacAlgorithm);
return;
}
return;
}
throw new WhitelistException(
"Sending host \"" + remoteHost + "\" was not matched by whitelist.");
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
return false;
}
Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement);
if (!jsonAnnotated.isEmpty()) {
Map<String, StructInfo> structs = new HashMap<String, StructInfo>();
StringBuilder dsl = new StringBuilder();
dsl.append("module json {\n");
for (Element el : jsonAnnotated) {
findStructs(structs, el, "CompiledJson requires public no argument constructor");
}
findRelatedReferences(structs);
DslOptions options = new DslOptions();
options.namespace = namespace;
buildDsl(structs, dsl, options);
dsl.append("}");
if (dsl.length() < 20) {
return false;
}
String fullDsl = dsl.toString();
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, fullDsl);
String fileContent;
try {
fileContent = AnnotationCompiler.buildExternalJson(fullDsl, options.namespace, options.useJodaTime);
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage());
return false;
}
try {
JavaFileObject jfo = processingEnv.getFiler().createSourceFile("ExternalSerialization");
BufferedWriter bw = new BufferedWriter(jfo.openWriter());
bw.write(fileContent);
bw.close();
FileObject rfo = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/com.dslplatform.json.Configuration");
bw = new BufferedWriter(rfo.openWriter());
bw.write(options.namespace + ".json.ExternalSerialization");
bw.close();
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files");
}
}
return false;
}
|
#vulnerable code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
return false;
}
Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement);
if (!jsonAnnotated.isEmpty()) {
Map<String, StructInfo> structs = new HashMap<String, StructInfo>();
StringBuilder dsl = new StringBuilder();
dsl.append("module json {\n");
for (Element el : jsonAnnotated) {
findStructs(structs, el, "CompiledJson requires public no argument constructor");
}
findRelatedReferences(structs);
buildDsl(structs, dsl);
dsl.append("}");
if (dsl.length() < 20) {
return false;
}
String fullDsl = dsl.toString();
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, fullDsl);
String fileContent;
try {
fileContent = AnnotationCompiler.buildExternalJson(fullDsl, namespace, useJodaTime);
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage());
return false;
}
try {
JavaFileObject jfo = processingEnv.getFiler().createSourceFile("ExternalSerialization");
BufferedWriter bw = new BufferedWriter(jfo.openWriter());
bw.write(fileContent);
bw.close();
FileObject rfo = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/com.dslplatform.json.Configuration");
bw = new BufferedWriter(rfo.openWriter());
bw.write(namespace + ".json.ExternalSerialization");
bw.close();
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files");
}
}
return false;
}
#location 27
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver() || annotations.isEmpty()) {
return false;
}
final DslJson<Object> dslJson = new DslJson<>(Settings.withRuntime().includeServiceLoader(getClass().getClassLoader()));
Set<Type> knownEncoders = dslJson.getRegisteredEncoders();
Set<Type> knownDecoders = dslJson.getRegisteredDecoders();
Set<String> allTypes = new HashSet<>();
for (Type t : knownEncoders) {
if (knownDecoders.contains(t)) {
allTypes.add(t.getTypeName());
}
}
final Analysis analysis = new Analysis(
processingEnv,
annotationUsage,
logLevel,
allTypes,
rawClass -> {
try {
Class<?> raw = Class.forName(rawClass);
return dslJson.canSerialize(raw) && dslJson.canDeserialize(raw);
} catch (Exception ignore) {
return false;
}
},
JsonIgnore,
NonNullable,
PropertyAlias,
JsonRequired,
Constructors,
Indexes,
unknownTypes,
false,
true,
true,
true);
Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement);
Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>();
Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>();
if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) {
Set<? extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement);
List<String> configurations = analysis.processConverters(jsonConverters);
analysis.processAnnotation(analysis.compiledJsonType, compiledJsons);
if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) {
analysis.processAnnotation(jacksonCreatorType, jacksonCreators);
}
if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) {
analysis.processAnnotation(jsonbCreatorType, jsonbCreators);
}
Map<String, StructInfo> structs = analysis.analyze();
if (analysis.hasError()) {
return false;
}
final List<String> generatedFiles = new ArrayList<>();
final List<Element> originatingElements = new ArrayList<>();
for (Map.Entry<String, StructInfo> entry : structs.entrySet()) {
StructInfo structInfo = entry.getValue();
if (structInfo.type == ObjectType.CLASS && structInfo.attributes.isEmpty()) {
continue;
}
String classNamePath = findConverterName(entry.getValue());
try {
JavaFileObject converterFile = processingEnv.getFiler().createSourceFile(classNamePath, structInfo.element);
try (Writer writer = converterFile.openWriter()) {
buildCode(writer, entry.getKey(), structInfo, structs, allTypes);
generatedFiles.add(classNamePath);
originatingElements.add(structInfo.element);
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed saving compiled json serialization file " + classNamePath);
}
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed creating compiled json serialization file " + classNamePath);
}
}
if (configurationFileName != null) {
final List<String> allConfigurations = new ArrayList<>(configurations);
try {
FileObject configFile = processingEnv.getFiler()
.createSourceFile(configurationFileName, originatingElements.toArray(new Element[0]));
try (Writer writer = configFile.openWriter()) {
buildRootConfiguration(writer, configurationFileName, generatedFiles);
allConfigurations.add(configurationFileName);
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed saving configuration file " + configurationFileName);
}
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed creating configuration file " + configurationFileName);
}
saveToServiceConfigFile(allConfigurations);
}
}
return false;
}
|
#vulnerable code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver() || annotations.isEmpty()) {
return false;
}
final DslJson<Object> dslJson = new DslJson<>(Settings.withRuntime().includeServiceLoader(getClass().getClassLoader()));
Set<Type> knownEncoders = dslJson.getRegisteredEncoders();
Set<Type> knownDecoders = dslJson.getRegisteredDecoders();
Set<String> allTypes = new HashSet<>();
for (Type t : knownEncoders) {
if (knownDecoders.contains(t)) {
allTypes.add(t.getTypeName());
}
}
final Analysis analysis = new Analysis(
processingEnv,
annotationUsage,
logLevel,
allTypes,
rawClass -> {
try {
Class<?> raw = Class.forName(rawClass);
return dslJson.canSerialize(raw) && dslJson.canDeserialize(raw);
} catch (Exception ignore) {
return false;
}
},
JsonIgnore,
NonNullable,
PropertyAlias,
JsonRequired,
Constructors,
Indexes,
unknownTypes,
false,
true,
true,
true);
Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement);
Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>();
Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>();
if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) {
Set<? extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement);
List<String> configurations = analysis.processConverters(jsonConverters);
analysis.processAnnotation(analysis.compiledJsonType, compiledJsons);
if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) {
analysis.processAnnotation(jacksonCreatorType, jacksonCreators);
}
if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) {
analysis.processAnnotation(jsonbCreatorType, jsonbCreators);
}
Map<String, StructInfo> structs = analysis.analyze();
if (analysis.hasError()) {
return false;
}
final List<String> generatedFiles = new ArrayList<>();
final List<Element> originatingElements = new ArrayList<>();
for (Map.Entry<String, StructInfo> entry : structs.entrySet()) {
StructInfo structInfo = entry.getValue();
if (structInfo.type == ObjectType.CLASS && structInfo.attributes.isEmpty()) {
continue;
}
String classNamePath = findConverterName(entry.getValue());
try {
JavaFileObject converterFile = processingEnv.getFiler().createSourceFile(classNamePath, structInfo.element);
try (Writer writer = converterFile.openWriter()) {
buildCode(writer, entry.getKey(), structInfo, structs, allowInline, allTypes);
generatedFiles.add(classNamePath);
originatingElements.add(structInfo.element);
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed saving compiled json serialization file " + classNamePath);
}
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed creating compiled json serialization file " + classNamePath);
}
}
if (configurationFileName != null) {
final List<String> allConfigurations = new ArrayList<>(configurations);
try {
FileObject configFile = processingEnv.getFiler()
.createSourceFile(configurationFileName, originatingElements.toArray(new Element[0]));
try (Writer writer = configFile.openWriter()) {
buildRootConfiguration(writer, configurationFileName, generatedFiles);
allConfigurations.add(configurationFileName);
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed saving configuration file " + configurationFileName);
}
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
"Failed creating configuration file " + configurationFileName);
}
saveToServiceConfigFile(allConfigurations);
}
}
return false;
}
#location 70
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
return false;
}
Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement);
if (!jsonAnnotated.isEmpty()) {
Map<String, StructInfo> structs = new HashMap<String, StructInfo>();
CompileOptions options = new CompileOptions();
for (Element el : jsonAnnotated) {
findStructs(structs, options, el, "CompiledJson requires public no argument constructor");
}
findRelatedReferences(structs, options);
String dsl = buildDsl(structs, options);
if (options.hasError) {
return false;
}
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, dsl);
String fileContent;
try {
fileContent = AnnotationCompiler.buildExternalJson(dsl, options.toOptions(namespace));
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage());
return false;
}
try {
String className = namespace + ".json.ExternalSerialization";
Writer writer = processingEnv.getFiler().createSourceFile(className).openWriter();
writer.write(fileContent);
writer.close();
writer = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", CONFIG).openWriter();
writer.write(className);
writer.close();
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files");
}
}
return false;
}
|
#vulnerable code
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
return false;
}
Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement);
if (!jsonAnnotated.isEmpty()) {
Map<String, StructInfo> structs = new HashMap<String, StructInfo>();
CompileOptions options = new CompileOptions();
for (Element el : jsonAnnotated) {
findStructs(structs, options, el, "CompiledJson requires public no argument constructor");
}
findRelatedReferences(structs, options);
String dsl = buildDsl(structs, options);
if (options.hasError) {
return false;
}
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, dsl);
String fileContent;
try {
fileContent = AnnotationCompiler.buildExternalJson(dsl, options.toOptions(namespace));
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage());
return false;
}
try {
JavaFileObject jfo = processingEnv.getFiler().createSourceFile("ExternalSerialization");
BufferedWriter bw = new BufferedWriter(jfo.openWriter());
bw.write(fileContent);
bw.close();
FileObject rfo = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/com.dslplatform.json.Configuration");
bw = new BufferedWriter(rfo.openWriter());
bw.write(namespace + ".json.ExternalSerialization");
bw.close();
} catch (IOException e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files");
}
}
return false;
}
#location 38
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@ApiMethod(name = "processRegistrationResponse")
public List<String> processRegistrationResponse(
@Named("responseData") String responseData, User user)
throws OAuthRequestException, ResponseException {
if (user == null) {
throw new OAuthRequestException("User is not authenticated");
}
Gson gson = new Gson();
JsonElement element = gson.fromJson(responseData, JsonElement.class);
JsonObject object = element.getAsJsonObject();
String clientDataJSON = object.get("clientDataJSON").getAsString();
String attestationObject = object.get("attestationObject").getAsString();
AuthenticatorAttestationResponse attestation =
new AuthenticatorAttestationResponse(clientDataJSON, attestationObject);
// TODO
String credentialId = BaseEncoding.base64Url().encode(
attestation.getAttestationObject().getAuthenticatorData().getAttData().getCredentialId());
String type = null;
String session = null;
PublicKeyCredential cred = new PublicKeyCredential(credentialId, type,
BaseEncoding.base64Url().decode(credentialId), attestation);
try {
switch (cred.getAttestationType()) {
case FIDOU2F:
U2fServer.registerCredential(cred, user.getEmail(), session, Constants.APP_ID);
break;
case ANDROIDSAFETYNET:
AndroidSafetyNetServer.registerCredential(
cred, user.getEmail(), session, Constants.APP_ID);
break;
default:
// This should never happen.
}
} catch (ServletException e) {
// TODO
}
Credential credential = new Credential(cred);
credential.save(user.getEmail());
List<String> resultList = new ArrayList<String>();
resultList.add(credential.toJson());
return resultList;
}
|
#vulnerable code
@ApiMethod(name = "processRegistrationResponse")
public List<String> processRegistrationResponse(
@Named("responseData") String responseData, User user)
throws OAuthRequestException, ResponseException {
if (user == null) {
throw new OAuthRequestException("User is not authenticated");
}
Gson gson = new Gson();
JsonElement element = gson.fromJson(responseData, JsonElement.class);
AuthenticatorAttestationResponse attestation =
new AuthenticatorAttestationResponse(element);
// TODO
String credentialId = BaseEncoding.base64Url().encode(
attestation.getAttestationObject().getAuthenticatorData().getAttData().getCredentialId());
String type = null;
String session = null;
PublicKeyCredential cred = new PublicKeyCredential(credentialId, type,
BaseEncoding.base64Url().decode(credentialId), attestation);
try {
switch (cred.getAttestationType()) {
case FIDOU2F:
U2fServer.registerCredential(cred, user.getEmail(), session, Constants.APP_ID);
break;
case ANDROIDSAFETYNET:
AndroidSafetyNetServer.registerCredential(
cred, user.getEmail(), session, Constants.APP_ID);
break;
default:
// This should never happen.
}
} catch (ServletException e) {
// TODO
}
Credential credential = new Credential(cred);
credential.save(user.getEmail());
List<String> resultList = new ArrayList<String>();
resultList.add(credential.toJson());
return resultList;
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void shouldRunAllQueuedCallbacks() throws Exception {
final int count = 1000;
IntFunction<Callable<ResultSet>> insert = value -> () -> dbr.query("INSERT INTO CP_TEST VALUES($1)", singletonList(value));
List<Callable<ResultSet>> tasks = IntStream.range(0, count).mapToObj(insert).collect(toList());
ExecutorService executor = Executors.newFixedThreadPool(20);
executor.invokeAll(tasks).stream().map(this::await);
assertEquals(count, dbr.query("SELECT COUNT(*) FROM CP_TEST").row(0).getLong(0).longValue());
}
|
#vulnerable code
@Test
public void shouldRunAllQueuedCallbacks() throws Exception {
final AtomicInteger count = new AtomicInteger();
final CountDownLatch latch = new CountDownLatch(1000);
for(int i = 0; i < 20; i++) {
new Thread(new Runnable() {
@Override
public void run() {
final Queue<Runnable> queries = new LinkedList<>();
for(int j = 0; j < 50; j++) {
queries.add(() -> pool.query("INSERT INTO CP_TEST VALUES($1)", asList(UUID.randomUUID()), result -> {
latch.countDown();
count.incrementAndGet();
if(!queries.isEmpty()) {
queries.poll().run();
}
}, err));
}
queries.poll().run();
}
}).start();
}
assertTrue(latch.await(5L, TimeUnit.SECONDS));
assertEquals(1000, count.get());
ResultHolder result = new ResultHolder();
pool.query("SELECT COUNT(*) FROM CP_TEST", result, result.errorHandler());
assertEquals(count.get(), result.result().row(0).getLong(0).longValue());
}
#location 30
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) {
/*
36 MB test data:
No compression:
Write time: 450 ms
output.length: 36000226
PackBits:
Write time: 688 ms
output.length: 30322187
Deflate, BEST_SPEED (1):
Write time: 1276 ms
output.length: 14128866
Deflate, 2:
Write time: 1297 ms
output.length: 13848735
Deflate, 3:
Write time: 1594 ms
output.length: 13103224
Deflate, 4:
Write time: 1663 ms
output.length: 13380899 (!!)
5
Write time: 1941 ms
output.length: 13171244
6
Write time: 2311 ms
output.length: 12845101
7: Write time: 2853 ms
output.length: 12759426
8:
Write time: 4429 ms
output.length: 12624517
Deflate: DEFAULT_COMPRESSION (6?):
Write time: 2357 ms
output.length: 12845101
Deflate, BEST_COMPRESSION (9):
Write time: 4998 ms
output.length: 12600399
*/
int samplesPerPixel = (Integer) entries.get(TIFF.TAG_SAMPLES_PER_PIXEL).getValue();
int bitPerSample = ((short[]) entries.get(TIFF.TAG_BITS_PER_SAMPLE).getValue())[0];
// Use predictor by default for LZW and ZLib/Deflate
// TODO: Unless explicitly disabled in TIFFImageWriteParam
int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue();
OutputStream stream;
switch (compression) {
case TIFFBaseline.COMPRESSION_NONE:
return imageOutput;
case TIFFBaseline.COMPRESSION_PACKBITS:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new PackBitsEncoder(), true);
// NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default
// (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step)
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_ZLIB:
case TIFFExtension.COMPRESSION_DEFLATE:
// NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct.
// API Docs says:
// A compression quality setting of 0.0 is most generically interpreted as "high compression is important,"
// while a setting of 1.0 is most generically interpreted as "high image quality is important."
// However, the JAI TIFFImageWriter uses:
// if (param & compression etc...) {
// float quality = param.getCompressionQuality();
// deflateLevel = (int)(1 + 8*quality);
// } else {
// deflateLevel = Deflater.DEFAULT_COMPRESSION;
// }
// (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION)
// PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P
int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression...
if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) {
deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality());
}
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024);
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_LZW:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * samplesPerPixel * bitPerSample + 7) / 8));
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE:
case TIFFExtension.COMPRESSION_CCITT_T4:
case TIFFExtension.COMPRESSION_CCITT_T6:
long option = 0L;
if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) {
option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue();
}
Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER);
int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT);
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option);
return new DataOutputStream(stream);
}
throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression));
}
|
#vulnerable code
private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) {
/*
36 MB test data:
No compression:
Write time: 450 ms
output.length: 36000226
PackBits:
Write time: 688 ms
output.length: 30322187
Deflate, BEST_SPEED (1):
Write time: 1276 ms
output.length: 14128866
Deflate, 2:
Write time: 1297 ms
output.length: 13848735
Deflate, 3:
Write time: 1594 ms
output.length: 13103224
Deflate, 4:
Write time: 1663 ms
output.length: 13380899 (!!)
5
Write time: 1941 ms
output.length: 13171244
6
Write time: 2311 ms
output.length: 12845101
7: Write time: 2853 ms
output.length: 12759426
8:
Write time: 4429 ms
output.length: 12624517
Deflate: DEFAULT_COMPRESSION (6?):
Write time: 2357 ms
output.length: 12845101
Deflate, BEST_COMPRESSION (9):
Write time: 4998 ms
output.length: 12600399
*/
// Use predictor by default for LZW and ZLib/Deflate
// TODO: Unless explicitly disabled in TIFFImageWriteParam
int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue();
OutputStream stream;
switch (compression) {
case TIFFBaseline.COMPRESSION_NONE:
return imageOutput;
case TIFFBaseline.COMPRESSION_PACKBITS:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new PackBitsEncoder(), true);
// NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default
// (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step)
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_ZLIB:
case TIFFExtension.COMPRESSION_DEFLATE:
// NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct.
// API Docs says:
// A compression quality setting of 0.0 is most generically interpreted as "high compression is important,"
// while a setting of 1.0 is most generically interpreted as "high image quality is important."
// However, the JAI TIFFImageWriter uses:
// if (param & compression etc...) {
// float quality = param.getCompressionQuality();
// deflateLevel = (int)(1 + 8*quality);
// } else {
// deflateLevel = Deflater.DEFAULT_COMPRESSION;
// }
// (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION)
// PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P
int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression...
if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) {
deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality());
}
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024);
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_LZW:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * image.getColorModel().getPixelSize() + 7) / 8));
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE:
case TIFFExtension.COMPRESSION_CCITT_T4:
case TIFFExtension.COMPRESSION_CCITT_T6:
long option = 0L;
if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) {
option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue();
}
Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER);
int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT);
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option);
return new DataOutputStream(stream);
}
throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression));
}
#location 89
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Writes the IFF BODY chunk: chunk id, chunk length, the raw pixel data, and an optional pad byte.
private void writeBody(ByteArrayOutputStream pImageData) throws IOException {
imageOutput.writeInt(IFF.CHUNK_BODY);
imageOutput.writeInt(pImageData.size());
// NOTE: This is much faster than imageOutput.write(pImageData.toByteArray())
// as the data array is not duplicated
OutputStream adapter = IIOUtil.createStreamAdapter(imageOutput);
try {
pImageData.writeTo(adapter);
}
finally {
// Always close the adapter so any buffered bytes are flushed and the wrapper is released.
adapter.close();
}
// NOTE(review): IFF normally pads odd-length chunks; confirm the even-length condition here is intentional.
if (pImageData.size() % 2 == 0) {
imageOutput.writeByte(0); // PAD
}
imageOutput.flush();
}
|
#vulnerable code
// Writes the IFF BODY chunk: chunk id, chunk length, the raw pixel data, and an optional pad byte.
private void writeBody(ByteArrayOutputStream pImageData) throws IOException {
imageOutput.writeInt(IFF.CHUNK_BODY);
imageOutput.writeInt(pImageData.size());
// NOTE: This is much faster than mOutput.write(pImageData.toByteArray())
// as the data array is not duplicated
// NOTE(review): the stream adapter created here is never closed — potential resource leak.
pImageData.writeTo(IIOUtil.createStreamAdapter(imageOutput));
// NOTE(review): IFF normally pads odd-length chunks; confirm the even-length condition here is intentional.
if (pImageData.size() % 2 == 0) {
imageOutput.writeByte(0); // PAD
}
imageOutput.flush();
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies that an EOFException during JPEG segment parsing leaves the stream in a sane
// state: all subsequent reads keep returning -1 and the stream position stays unchanged.
@Test
public void testEOFExceptionInSegmentParsingShouldNotCreateBadState() throws IOException {
ImageInputStream iis = new JPEGSegmentImageInputStream(ImageIO.createImageInputStream(getClassLoaderResource("/broken-jpeg/broken-no-sof-ascii-transfer-mode.jpg")));
byte[] buffer = new byte[4096];
// NOTE: This is a simulation of how the native parts of com.sun...JPEGImageReader would read the image...
assertEquals(2, iis.read(buffer, 0, buffer.length));
assertEquals(2, iis.getStreamPosition());
iis.seek(0x2012); // bad segment length, should have been 0x0012, not 0x2012
assertEquals(0x2012, iis.getStreamPosition());
// So far, so good (but stream position is now really beyond EOF)...
// This however, will blow up with an EOFException internally (but we'll return -1 to be good)
assertEquals(-1, iis.read(buffer, 0, buffer.length));
assertEquals(-1, iis.read());
assertEquals(0x2012, iis.getStreamPosition());
// Again, should just continue returning -1 for ever
assertEquals(-1, iis.read(buffer, 0, buffer.length));
assertEquals(-1, iis.read());
assertEquals(0x2012, iis.getStreamPosition());
}
|
#vulnerable code
// Verifies that an EOFException during JPEG segment parsing leaves the stream in a sane
// state: subsequent bulk reads keep returning -1 and the stream position stays unchanged.
// NOTE(review): the ImageInputStream is never closed, and single-byte read() behavior
// after EOF is not covered here.
@Test
public void testEOFExceptionInSegmentParsingShouldNotCreateBadState() throws IOException {
ImageInputStream iis = new JPEGSegmentImageInputStream(ImageIO.createImageInputStream(getClassLoaderResource("/broken-jpeg/broken-no-sof-ascii-transfer-mode.jpg")));
byte[] buffer = new byte[4096];
// NOTE: This is a simulation of how the native parts of com.sun...JPEGImageReader would read the image...
assertEquals(2, iis.read(buffer, 0, buffer.length));
assertEquals(2, iis.getStreamPosition());
iis.seek(0x2012); // bad segment length, should have been 0x0012, not 0x2012
assertEquals(0x2012, iis.getStreamPosition());
// So far, so good (but stream position is now really beyond EOF)...
// This however, will blow up with an EOFException internally (but we'll return -1 to be good)
assertEquals(-1, iis.read(buffer, 0, buffer.length));
assertEquals(0x2012, iis.getStreamPosition());
// Again, should just continue returning -1 for ever
assertEquals(-1, iis.read(buffer, 0, buffer.length));
assertEquals(0x2012, iis.getStreamPosition());
}
#location 12
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Reads the test compound document and verifies the root entry has 25 children,
// including a "Catalog" entry with a readable input stream. try-with-resources
// guarantees the document is closed.
@Test
public void testReadThumbsCatalogFile() throws IOException {
try (CompoundDocument document = createTestDocument()) {
Entry root = document.getRootEntry();
assertNotNull(root);
assertEquals(25, root.getChildEntries().size());
Entry catalog = root.getChildEntry("Catalog");
assertNotNull(catalog);
assertNotNull("Input stream may not be null", catalog.getInputStream());
}
}
|
#vulnerable code
// Reads the test compound document and verifies the root entry has 25 children,
// including a "Catalog" entry with a readable input stream.
// NOTE(review): the CompoundDocument is never closed — resource leak.
@Test
public void testReadThumbsCatalogFile() throws IOException {
CompoundDocument document = createTestDocument();
Entry root = document.getRootEntry();
assertNotNull(root);
assertEquals(25, root.getChildEntries().size());
Entry catalog = root.getChildEntry("Catalog");
assertNotNull(catalog);
assertNotNull("Input stream may not be null", catalog.getInputStream());
}
#location 10
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies the sorted child-entry names of the test document's root entry.
// try-with-resources guarantees the document is closed.
@Test
public void testContents() throws IOException {
try (CompoundDocument document = createTestDocument()) {
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries());
assertEquals(25, children.size());
// Weirdness in the file format, name is *written backwards* 1-24 + Catalog
for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) {
assertEquals(name, children.first().getName());
children.remove(children.first());
}
}
}
|
#vulnerable code
// Verifies the sorted child-entry names of the test document's root entry.
// NOTE(review): the CompoundDocument is never closed — resource leak.
@Test
public void testContents() throws IOException {
CompoundDocument document = createTestDocument();
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries());
assertEquals(25, children.size());
// Weirdness in the file format, name is *written backwards* 1-24 + Catalog
for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) {
assertEquals(name, children.first().getName());
children.remove(children.first());
}
}
#location 5
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies the sorted child-entry names of the test document's root entry.
// try-with-resources guarantees the document is closed.
@Test
public void testContents() throws IOException {
try (CompoundDocument document = createTestDocument()) {
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries());
assertEquals(25, children.size());
// Weirdness in the file format, name is *written backwards* 1-24 + Catalog
for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) {
assertEquals(name, children.first().getName());
children.remove(children.first());
}
}
}
|
#vulnerable code
// Verifies the sorted child-entry names of the test document's root entry.
// NOTE(review): the CompoundDocument is never closed — resource leak.
@Test
public void testContents() throws IOException {
CompoundDocument document = createTestDocument();
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries());
assertEquals(25, children.size());
// Weirdness in the file format, name is *written backwards* 1-24 + Catalog
for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) {
assertEquals(name, children.first().getName());
children.remove(children.first());
}
}
#location 9
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Returns a ColorSpace for the given constant. Adobe RGB 1998 and Generic CMYK ICC
// profiles are lazily loaded (system/user-configured path first, then a bundled or
// built-in fallback), validated against the expected color-space type, and cached
// behind weak references under the class lock. Other constants delegate to
// ColorSpace.getInstance.
public static ColorSpace getColorSpace(int colorSpace) {
ICC_Profile profile;
switch (colorSpace) {
case CS_ADOBE_RGB_1998:
synchronized (ColorSpaces.class) {
profile = adobeRGB1998.get();
if (profile == null) {
// Try to get system default or user-defined profile
profile = readProfileFromPath(Profiles.getPath("ADOBE_RGB_1998"));
if (profile == null) {
// Fall back to the bundled ClayRGB1998 public domain Adobe RGB 1998 compatible profile,
// which is identical for all practical purposes
profile = readProfileFromClasspathResource("/profiles/ClayRGB1998.icc");
if (profile == null) {
// Should never happen given we now bundle fallback profile...
throw new IllegalStateException("Could not read AdobeRGB1998 profile");
}
}
// Reject a configured profile of the wrong type before caching it.
if (profile.getColorSpaceType() != ColorSpace.TYPE_RGB) {
throw new IllegalStateException("Configured AdobeRGB1998 profile is not TYPE_RGB");
}
adobeRGB1998 = new WeakReference<>(profile);
}
}
return createColorSpace(profile);
case CS_GENERIC_CMYK:
synchronized (ColorSpaces.class) {
profile = genericCMYK.get();
if (profile == null) {
// Try to get system default or user-defined profile
profile = readProfileFromPath(Profiles.getPath("GENERIC_CMYK"));
if (profile == null) {
if (DEBUG) {
System.out.println("Using fallback profile");
}
// Fall back to generic CMYK ColorSpace, which is *insanely slow* using ColorConvertOp... :-P
return CMYKColorSpace.getInstance();
}
// Reject a configured profile of the wrong type before caching it.
if (profile.getColorSpaceType() != ColorSpace.TYPE_CMYK) {
throw new IllegalStateException("Configured Generic CMYK profile is not TYPE_CMYK");
}
genericCMYK = new WeakReference<>(profile);
}
}
return createColorSpace(profile);
default:
// Default cases for convenience
return ColorSpace.getInstance(colorSpace);
}
}
|
#vulnerable code
// Returns a ColorSpace for the given constant. Adobe RGB 1998 and Generic CMYK ICC
// profiles are lazily loaded and cached behind weak references under the class lock.
// NOTE(review): loaded profiles are cached without validating that they are actually
// TYPE_RGB / TYPE_CMYK — a misconfigured profile path would be cached as-is; confirm
// configured profiles can be trusted.
public static ColorSpace getColorSpace(int colorSpace) {
ICC_Profile profile;
switch (colorSpace) {
case CS_ADOBE_RGB_1998:
synchronized (ColorSpaces.class) {
profile = adobeRGB1998.get();
if (profile == null) {
// Try to get system default or user-defined profile
profile = readProfileFromPath(Profiles.getPath("ADOBE_RGB_1998"));
if (profile == null) {
// Fall back to the bundled ClayRGB1998 public domain Adobe RGB 1998 compatible profile,
// which is identical for all practical purposes
profile = readProfileFromClasspathResource("/profiles/ClayRGB1998.icc");
if (profile == null) {
// Should never happen given we now bundle fallback profile...
throw new IllegalStateException("Could not read AdobeRGB1998 profile");
}
}
adobeRGB1998 = new WeakReference<>(profile);
}
}
return createColorSpace(profile);
case CS_GENERIC_CMYK:
synchronized (ColorSpaces.class) {
profile = genericCMYK.get();
if (profile == null) {
// Try to get system default or user-defined profile
profile = readProfileFromPath(Profiles.getPath("GENERIC_CMYK"));
if (profile == null) {
if (DEBUG) {
System.out.println("Using fallback profile");
}
// Fall back to generic CMYK ColorSpace, which is *insanely slow* using ColorConvertOp... :-P
return CMYKColorSpace.getInstance();
}
genericCMYK = new WeakReference<>(profile);
}
}
return createColorSpace(profile);
default:
// Default cases for convenience
return ColorSpace.getInstance(colorSpace);
}
}
#location 28
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Wraps imageOutput in the compressor chain dictated by the Compression IFD entry
// (none/PackBits/Deflate/ZLib/LZW/CCITT). Sample layout (SamplesPerPixel,
// BitsPerSample) is taken from the IFD entries. Returns imageOutput directly for
// uncompressed output; throws IllegalArgumentException for unsupported compressions.
private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) {
/*
36 MB test data:
No compression:
Write time: 450 ms
output.length: 36000226
PackBits:
Write time: 688 ms
output.length: 30322187
Deflate, BEST_SPEED (1):
Write time: 1276 ms
output.length: 14128866
Deflate, 2:
Write time: 1297 ms
output.length: 13848735
Deflate, 3:
Write time: 1594 ms
output.length: 13103224
Deflate, 4:
Write time: 1663 ms
output.length: 13380899 (!!)
5
Write time: 1941 ms
output.length: 13171244
6
Write time: 2311 ms
output.length: 12845101
7: Write time: 2853 ms
output.length: 12759426
8:
Write time: 4429 ms
output.length: 12624517
Deflate: DEFAULT_COMPRESSION (6?):
Write time: 2357 ms
output.length: 12845101
Deflate, BEST_COMPRESSION (9):
Write time: 4998 ms
output.length: 12600399
*/
int samplesPerPixel = (Integer) entries.get(TIFF.TAG_SAMPLES_PER_PIXEL).getValue();
int bitPerSample = ((short[]) entries.get(TIFF.TAG_BITS_PER_SAMPLE).getValue())[0];
// Use predictor by default for LZW and ZLib/Deflate
// TODO: Unless explicitly disabled in TIFFImageWriteParam
int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue();
OutputStream stream;
switch (compression) {
case TIFFBaseline.COMPRESSION_NONE:
return imageOutput;
case TIFFBaseline.COMPRESSION_PACKBITS:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new PackBitsEncoder(), true);
// NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default
// (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step)
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_ZLIB:
case TIFFExtension.COMPRESSION_DEFLATE:
// NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct.
// API Docs says:
// A compression quality setting of 0.0 is most generically interpreted as "high compression is important,"
// while a setting of 1.0 is most generically interpreted as "high image quality is important."
// However, the JAI TIFFImageWriter uses:
// if (param & compression etc...) {
// float quality = param.getCompressionQuality();
// deflateLevel = (int)(1 + 8*quality);
// } else {
// deflateLevel = Deflater.DEFAULT_COMPRESSION;
// }
// (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION)
// PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P
int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression...
if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) {
deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality());
}
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024);
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder())
}
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_LZW:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * samplesPerPixel * bitPerSample + 7) / 8));
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE:
case TIFFExtension.COMPRESSION_CCITT_T4:
case TIFFExtension.COMPRESSION_CCITT_T6:
long option = 0L;
if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) {
option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue();
}
Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER);
int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT);
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option);
return new DataOutputStream(stream);
}
throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression));
}
|
#vulnerable code
// Wraps imageOutput in the compressor chain dictated by the Compression IFD entry
// (none/PackBits/Deflate/ZLib/LZW/CCITT). Returns imageOutput directly for
// uncompressed output; throws IllegalArgumentException for unsupported compressions.
// NOTE(review): band count and bit depth are taken from the first tile / color model
// (image.getTile(0, 0).getNumBands(), getComponentSize(0), getPixelSize()) rather than
// the SamplesPerPixel/BitsPerSample IFD entries — confirm these always agree with
// what is written to the IFD.
private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) {
/*
36 MB test data:
No compression:
Write time: 450 ms
output.length: 36000226
PackBits:
Write time: 688 ms
output.length: 30322187
Deflate, BEST_SPEED (1):
Write time: 1276 ms
output.length: 14128866
Deflate, 2:
Write time: 1297 ms
output.length: 13848735
Deflate, 3:
Write time: 1594 ms
output.length: 13103224
Deflate, 4:
Write time: 1663 ms
output.length: 13380899 (!!)
5
Write time: 1941 ms
output.length: 13171244
6
Write time: 2311 ms
output.length: 12845101
7: Write time: 2853 ms
output.length: 12759426
8:
Write time: 4429 ms
output.length: 12624517
Deflate: DEFAULT_COMPRESSION (6?):
Write time: 2357 ms
output.length: 12845101
Deflate, BEST_COMPRESSION (9):
Write time: 4998 ms
output.length: 12600399
*/
// Use predictor by default for LZW and ZLib/Deflate
// TODO: Unless explicitly disabled in TIFFImageWriteParam
int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue();
OutputStream stream;
switch (compression) {
case TIFFBaseline.COMPRESSION_NONE:
return imageOutput;
case TIFFBaseline.COMPRESSION_PACKBITS:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new PackBitsEncoder(), true);
// NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default
// (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step)
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_ZLIB:
case TIFFExtension.COMPRESSION_DEFLATE:
// NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct.
// API Docs says:
// A compression quality setting of 0.0 is most generically interpreted as "high compression is important,"
// while a setting of 1.0 is most generically interpreted as "high image quality is important."
// However, the JAI TIFFImageWriter uses:
// if (param & compression etc...) {
// float quality = param.getCompressionQuality();
// deflateLevel = (int)(1 + 8*quality);
// } else {
// deflateLevel = Deflater.DEFAULT_COMPRESSION;
// }
// (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION)
// PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P
int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression...
if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) {
deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality());
}
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024);
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFExtension.COMPRESSION_LZW:
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * image.getColorModel().getPixelSize() + 7) / 8));
if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) {
stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder());
}
return new DataOutputStream(stream);
case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE:
case TIFFExtension.COMPRESSION_CCITT_T4:
case TIFFExtension.COMPRESSION_CCITT_T6:
long option = 0L;
if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) {
option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue();
}
Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER);
int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT);
stream = IIOUtil.createStreamAdapter(imageOutput);
stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option);
return new DataOutputStream(stream);
}
throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression));
}
#location 91
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Parses the Exif APP1 segment (if present) into a TIFF CompoundDirectory.
// Returns null when there is no Exif segment; warns when the segment carries no data.
private CompoundDirectory getExif() throws IOException {
List<Application> exifSegments = getAppSegments(JPEG.APP1, "Exif");
if (!exifSegments.isEmpty()) {
Application exif = exifSegments.get(0);
int offset = exif.identifier.length() + 2; // Incl. pad
if (exif.data.length <= offset) {
processWarningOccurred("Exif chunk has no data.");
}
else {
// TODO: Consider returning ByteArrayImageInputStream from Segment.data()
// try-with-resources ensures the in-memory stream is closed after reading.
try (ImageInputStream stream = new ByteArrayImageInputStream(exif.data, offset, exif.data.length - offset)) {
return (CompoundDirectory) new TIFFReader().read(stream);
}
}
}
return null;
}
|
#vulnerable code
// Parses the Exif APP1 segment (if present) into a TIFF CompoundDirectory.
// Returns null when there is no Exif segment; warns when the segment carries no data.
// NOTE(review): the MemoryCacheImageInputStream is never closed — resource leak.
private CompoundDirectory getExif() throws IOException {
List<Application> exifSegments = getAppSegments(JPEG.APP1, "Exif");
if (!exifSegments.isEmpty()) {
Application exif = exifSegments.get(0);
InputStream data = exif.data();
if (data.read() == -1) { // Read pad
processWarningOccurred("Exif chunk has no data.");
}
else {
ImageInputStream stream = new MemoryCacheImageInputStream(data);
return (CompoundDirectory) new TIFFReader().read(stream);
// TODO: Directory offset of thumbnail is wrong/relative to container stream, causing trouble for the TIFFReader...
}
}
return null;
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies the root entry's child set is unmodifiable: removal must throw
// UnsupportedOperationException. try-with-resources guarantees the document is closed.
@Test(expected = UnsupportedOperationException.class)
public void testChildEntriesUnmodifiable() throws IOException {
try (CompoundDocument document = createTestDocument()) {
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = root.getChildEntries();
// Should not be allowed, as it modifies the internal structure
children.remove(children.first());
}
}
|
#vulnerable code
// Verifies the root entry's child set is unmodifiable: removal must throw
// UnsupportedOperationException.
// NOTE(review): the CompoundDocument is never closed — resource leak.
@Test(expected = UnsupportedOperationException.class)
public void testChildEntriesUnmodifiable() throws IOException {
CompoundDocument document = createTestDocument();
Entry root = document.getRootEntry();
assertNotNull(root);
SortedSet<Entry> children = root.getChildEntries();
// Should not be allowed, as it modifies the internal structure
children.remove(children.first());
}
#location 5
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Reads the test compound document and verifies the root entry has 25 children,
// including a "Catalog" entry with a readable input stream. try-with-resources
// guarantees the document is closed.
@Test
public void testReadThumbsCatalogFile() throws IOException {
try (CompoundDocument document = createTestDocument()) {
Entry root = document.getRootEntry();
assertNotNull(root);
assertEquals(25, root.getChildEntries().size());
Entry catalog = root.getChildEntry("Catalog");
assertNotNull(catalog);
assertNotNull("Input stream may not be null", catalog.getInputStream());
}
}
|
#vulnerable code
// Reads the test compound document and verifies the root entry has 25 children,
// including a "Catalog" entry with a readable input stream.
// NOTE(review): the CompoundDocument is never closed — resource leak.
@Test
public void testReadThumbsCatalogFile() throws IOException {
CompoundDocument document = createTestDocument();
Entry root = document.getRootEntry();
assertNotNull(root);
assertEquals(25, root.getChildEntries().size());
Entry catalog = root.getChildEntry("Catalog");
assertNotNull(catalog);
assertNotNull("Input stream may not be null", catalog.getInputStream());
}
#location 14
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// REST endpoint: deletes the report with the given name, if it exists.
// Best-effort contract: never propagates an exception to the caller.
@DELETE
@Path("/reports/{name}")
public void deleteReport(@PathParam("name") String name) {
    try {
        // findReport may return null for an unknown report name — guard before
        // removing, rather than relying on the catch-all below to swallow the NPE.
        ItemCollection itemCol = reportService.findReport(name);
        if (itemCol != null) {
            entityService.remove(itemCol);
        }
    } catch (Exception e) {
        // Preserves the original no-throw behavior of this endpoint.
        e.printStackTrace();
    }
}
|
#vulnerable code
// REST endpoint: deletes the report with the given name.
// NOTE(review): getReport may return null for an unknown name, making
// entityService.remove(itemCol) a potential null dereference that is then
// silently swallowed by the catch-all.
@DELETE
@Path("/reports/{name}")
public void deleteReport(@PathParam("name") String name) {
try {
ItemCollection itemCol = reportService.getReport(name);
entityService.remove(itemCol);
} catch (Exception e) {
e.printStackTrace();
}
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies addWorkDays starting from a SATURDAY skips the weekend:
// +1 lands on TUESDAY, +4 on FRIDAY, +5 on the following MONDAY.
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
// Verifies addWorkDays starting from a SATURDAY skips the weekend.
// NOTE(review): calls addWorkDays statically on WorkflowSchedulerService instead of
// through an instance; if addWorkDays can return null, chaining .get(...) would NPE.
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
#location 14
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Drains up to 'count' add/remove entries from the eventlog into the Lucene index,
// removing each processed entry. Returns true when the cache was fully drained
// (or on an unrecoverable index I/O error, to unblock the writer); false when
// more events remain. The IndexWriter is always closed in the finally block.
// NOTE(review): the final trace log assumes findEvents never returns null.
boolean flushEventLogByCount(int count) {
Date lastEventDate = null;
boolean cacheIsEmpty = true;
IndexWriter indexWriter = null;
long l = System.currentTimeMillis();
logger.finest("......flush eventlog cache....");
// Fetch one extra entry so we can tell whether more events remain after 'count'.
List<EventLogEntry> events = eventLogService.findEvents(count + 1,
EVENTLOG_TOPIC_ADD, EVENTLOG_TOPIC_REMOVE);
if (events != null && events.size() > 0) {
try {
indexWriter = createIndexWriter();
int _counter = 0;
for (EventLogEntry eventLogEntry : events) {
Term term = new Term("$uniqueid", eventLogEntry.getUniqueID());
// lookup the Document Entity...
org.imixs.workflow.engine.jpa.Document doc = manager
.find(org.imixs.workflow.engine.jpa.Document.class, eventLogEntry.getUniqueID());
// if the document was found we add/update the index. Otherwise we remove the
// document form the index.
if (doc != null && EVENTLOG_TOPIC_ADD.equals(eventLogEntry.getTopic())) {
// add workitem to search index....
long l2 = System.currentTimeMillis();
ItemCollection workitem = new ItemCollection();
workitem.setAllItems(doc.getData());
if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
indexWriter.updateDocument(term, createDocument(workitem));
logger.finest("......lucene add/update workitem '" + doc.getId() + "' to index in "
+ (System.currentTimeMillis() - l2) + "ms");
}
} else {
long l2 = System.currentTimeMillis();
indexWriter.deleteDocuments(term);
logger.finest("......lucene remove workitem '" + term + "' from index in "
+ (System.currentTimeMillis() - l2) + "ms");
}
// remove the eventLogEntry.
lastEventDate = eventLogEntry.getModified().getTime();
eventLogService.removeEvent(eventLogEntry);
// break?
_counter++;
if (_counter >= count) {
// we skipp the last one if the maximum was reached.
cacheIsEmpty = false;
break;
}
}
} catch (IOException luceneEx) {
logger.warning("...unable to flush lucene event log: " + luceneEx.getMessage());
// We just log a warning here and close the flush mode to no longer block the
// writer.
// NOTE: maybe throwing a IndexException would be an alternative:
//
// throw new IndexException(IndexException.INVALID_INDEX, "Unable to update
// lucene search index",
// luceneEx);
return true;
} finally {
// close writer!
if (indexWriter != null) {
logger.finest("......lucene close IndexWriter...");
try {
indexWriter.close();
} catch (CorruptIndexException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ",
e);
} catch (IOException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ",
e);
}
}
}
}
logger.fine("...flushEventLog - " + events.size() + " events in " + (System.currentTimeMillis() - l)
+ " ms - last log entry: " + lastEventDate);
return cacheIsEmpty;
}
|
#vulnerable code
// Drains up to 'count' add/remove entries from the eventlog into the Lucene index,
// removing each processed entry. Returns true when the cache was fully drained
// (or on an unrecoverable index I/O error); false when more events remain.
// NOTE(review): the topic is parsed out of the entity id by string prefix; an id with
// neither prefix leaves topic == null and is treated as a removal. The final trace
// log dereferences documentList unconditionally — NPE if findEvents returned null.
boolean flushEventLogByCount(int count) {
Date lastEventDate = null;
boolean cacheIsEmpty = true;
IndexWriter indexWriter = null;
long l = System.currentTimeMillis();
logger.finest("......flush eventlog cache....");
// Fetch one extra entry so we can tell whether more events remain after 'count'.
List<org.imixs.workflow.engine.jpa.Document> documentList = eventLogService.findEvents(count + 1,
EVENTLOG_TOPIC_ADD, EVENTLOG_TOPIC_REMOVE);
if (documentList != null && documentList.size() > 0) {
try {
indexWriter = createIndexWriter();
int _counter = 0;
for (org.imixs.workflow.engine.jpa.Document eventLogEntry : documentList) {
String topic = null;
String id = eventLogEntry.getId();
// cut prafix...
if (id.startsWith(EVENTLOG_TOPIC_ADD)) {
id = id.substring(EVENTLOG_TOPIC_ADD.length() + 1);
topic = EVENTLOG_TOPIC_ADD;
}
if (id.startsWith(EVENTLOG_TOPIC_REMOVE)) {
id = id.substring(EVENTLOG_TOPIC_REMOVE.length() + 1);
topic = EVENTLOG_TOPIC_REMOVE;
}
// lookup the workitem...
org.imixs.workflow.engine.jpa.Document doc = manager
.find(org.imixs.workflow.engine.jpa.Document.class, id);
Term term = new Term("$uniqueid", id);
// if the document was found we add/update the index. Otherwise we remove the
// document form the index.
if (doc != null && EVENTLOG_TOPIC_ADD.equals(topic)) {
// add workitem to search index....
long l2 = System.currentTimeMillis();
ItemCollection workitem = new ItemCollection();
workitem.setAllItems(doc.getData());
if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) {
indexWriter.updateDocument(term, createDocument(workitem));
logger.finest("......lucene add/update workitem '" + id + "' to index in "
+ (System.currentTimeMillis() - l2) + "ms");
}
} else {
long l2 = System.currentTimeMillis();
indexWriter.deleteDocuments(term);
logger.finest("......lucene remove workitem '" + id + "' from index in "
+ (System.currentTimeMillis() - l2) + "ms");
}
// remove the eventLogEntry.
lastEventDate = eventLogEntry.getCreated().getTime();
manager.remove(eventLogEntry);
// break?
_counter++;
if (_counter >= count) {
// we skipp the last one if the maximum was reached.
cacheIsEmpty = false;
break;
}
}
} catch (IOException luceneEx) {
logger.warning("...unable to flush lucene event log: " + luceneEx.getMessage());
// We just log a warning here and close the flush mode to no longer block the
// writer.
// NOTE: maybe throwing a IndexException would be an alternative:
//
// throw new IndexException(IndexException.INVALID_INDEX, "Unable to update
// lucene search index",
// luceneEx);
return true;
} finally {
// close writer!
if (indexWriter != null) {
logger.finest("......lucene close IndexWriter...");
try {
indexWriter.close();
} catch (CorruptIndexException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ",
e);
} catch (IOException e) {
throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ",
e);
}
}
}
}
logger.fine("...flushEventLog - " + documentList.size() + " events in " + (System.currentTimeMillis() - l)
+ " ms - last log entry: " + lastEventDate);
return cacheIsEmpty;
}
#location 90
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Verifies addWorkDays starting from a MONDAY: weekdays advance one per workday,
// weekends are skipped (+5 -> next MONDAY, +9 -> FRIDAY, +10 -> MONDAY).
@Test
public void testAddWorkdaysFromMonday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
System.out.println("Startdate=" + startDate.getTime());
Assert.assertEquals(Calendar.TUESDAY,
workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
workflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
// Verifies addWorkDays starting from a MONDAY, skipping weekends.
// NOTE(review): calls addWorkDays statically on WorkflowSchedulerService instead of
// through an instance; if addWorkDays can return null, chaining .get(...) would NPE.
@Test
public void testAddWorkdaysFromMonday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
System.out.println("Startdate=" + startDate.getTime());
Assert.assertEquals(Calendar.TUESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testParseResult() {
List<ItemCollection> result=null;
String testString = "{\n" +
" \"responseHeader\":{\n" +
" \"status\":0,\n" +
" \"QTime\":4,\n" +
" \"params\":{\n" +
" \"q\":\"*:*\",\n" +
" \"_\":\"1567286252995\"}},\n" +
" \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" +
" {\n" +
" \"type\":[\"model\"],\n" +
" \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" +
" \"_modified\":[20190831211617],\n" +
" \"_created\":[20190831211617],\n" +
" \"_version_\":1643418672068296704},\n" +
" {\n" +
" \"type\":[\"adminp\"],\n" +
" \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" +
" \"_modified\":[20190831211618],\n" +
" \"_created\":[20190831211618],\n" +
" \"_version_\":1643418672172105728}]\n" +
" }}";
result=solrSearchService.parseQueryResult(testString);
Assert.assertEquals(2,result.size());
ItemCollection document=null;
document=result.get(0);
Assert.assertEquals("model", document.getItemValueString("type"));
Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getItemValueString("id"));
Assert.assertEquals(1567278977000l, document.getItemValueDate("_modified").getTime());
Assert.assertEquals(1567278977000l, document.getItemValueDate("_created").getTime());
document=result.get(1);
Assert.assertEquals("adminp", document.getItemValueString("type"));
Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457",document.getItemValueString("id"));
Assert.assertEquals(1567278978000l, document.getItemValueDate("_created").getTime());
}
|
#vulnerable code
@Test
public void testParseResult() {
List<ItemCollection> result=null;
String testString = "{\n" +
" \"responseHeader\":{\n" +
" \"status\":0,\n" +
" \"QTime\":4,\n" +
" \"params\":{\n" +
" \"q\":\"*:*\",\n" +
" \"_\":\"1567286252995\"}},\n" +
" \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" +
" {\n" +
" \"type\":[\"model\"],\n" +
" \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" +
" \"_modified\":[20190831211617],\n" +
" \"_created\":[20190831211617],\n" +
" \"_version_\":1643418672068296704},\n" +
" {\n" +
" \"type\":[\"adminp\"],\n" +
" \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" +
" \"_modified\":[20190831211618],\n" +
" \"_created\":[20190831211618],\n" +
" \"_version_\":1643418672172105728}]\n" +
" }}";
result=solrSearchService.parseQueryResult(testString);
Assert.assertEquals(2,result.size());
ItemCollection document=null;
document=result.get(0);
Assert.assertEquals("model", document.getItemValueString("type"));
Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getUniqueID());
Assert.assertEquals(1567278977000l, document.getItemValueDate("$modified").getTime());
Assert.assertEquals(1567278977000l, document.getItemValueDate("$created").getTime());
document=result.get(1);
Assert.assertEquals("adminp", document.getItemValueString("type"));
Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457", document.getUniqueID());
Assert.assertEquals(1567278978000l, document.getItemValueDate("$created").getTime());
}
#location 37
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testAddWorkdaysFromSaturday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
#location 18
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void removeWorkitem(String uniqueID) throws PluginException {
IndexWriter awriter = null;
try {
awriter = createIndexWriter();
Term term = new Term("$uniqueid", uniqueID);
awriter.deleteDocuments(term);
} catch (CorruptIndexException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
} catch (LockObtainFailedException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
} catch (IOException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
}
}
|
#vulnerable code
public void removeWorkitem(String uniqueID) throws PluginException {
IndexWriter awriter = null;
Properties prop = propertyService.getProperties();
if (!prop.isEmpty()) {
try {
awriter = createIndexWriter(prop);
Term term = new Term("$uniqueid", uniqueID);
awriter.deleteDocuments(term);
} catch (CorruptIndexException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
} catch (LockObtainFailedException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
} catch (IOException e) {
throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX,
"Unable to remove workitem '" + uniqueID + "' from search index", e);
}
}
}
#location 8
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
// end of bpmn2:process
if (qName.equalsIgnoreCase("bpmn2:process")) {
if (currentWorkflowGroup != null) {
currentWorkflowGroup = null;
}
}
// end of bpmn2:task -
if (bImixsTask && qName.equalsIgnoreCase("bpmn2:task")) {
bImixsTask = false;
taskCache.put(bpmnID, currentEntity);
}
if (qName.equalsIgnoreCase("bpmn2:extensionElements")) {
bExtensionElements = false;
}
// end of bpmn2:intermediateCatchEvent -
if (bImixsEvent && (qName.equalsIgnoreCase("bpmn2:intermediateCatchEvent")
|| qName.equalsIgnoreCase("bpmn2:intermediateThrowEvent"))) {
bImixsEvent = false;
// we need to cache the activities because the sequenceflows must be
// analysed later
eventCache.put(bpmnID, currentEntity);
}
/*
* End of a imixs:value
*/
if (qName.equalsIgnoreCase("imixs:value")) {
if (bExtensionElements && bItemValue && currentEntity != null && characterStream != null) {
String svalue = characterStream.toString();
List valueList = currentEntity.getItemValue(currentItemName);
if ("xs:boolean".equals(currentItemType.toLowerCase())) {
valueList.add(Boolean.valueOf(svalue));
} else if ("xs:integer".equals(currentItemType.toLowerCase())) {
valueList.add(Integer.valueOf(svalue));
} else {
valueList.add(svalue);
}
// item will only be added if it is not listed in the ignoreItem
// List!
if (!ignoreItemList.contains(currentItemName)) {
currentEntity.replaceItemValue(currentItemName, valueList);
}
}
bItemValue = false;
characterStream = null;
}
if (qName.equalsIgnoreCase("bpmn2:documentation")) {
if (currentEntity != null) {
currentEntity.replaceItemValue("rtfdescription", characterStream.toString());
}
// bpmn2:message?
if (bMessage) {
// cache the message...
messageCache.put(currentMessageName, characterStream.toString());
bMessage = false;
}
// bpmn2:annotation?
if (bAnnotation) {
// cache the annotation
annotationCache.put(currentAnnotationName, characterStream.toString());
bAnnotation = false;
}
characterStream = null;
bdocumentation = false;
}
// end of bpmn2:intermediateThrowEvent -
if (bLinkThrowEvent && !bLinkCatchEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) {
bLinkThrowEvent = false;
// we need to cache the link name
linkThrowEventCache.put(bpmnID, currentLinkName);
}
// end of bpmn2:intermediateCatchEvent -
if (bLinkCatchEvent && !bLinkThrowEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) {
bLinkCatchEvent = false;
// we need to cache the link name
linkCatchEventCache.put(currentLinkName, bpmnID);
}
// test conditional sequence flow...
if (bSequenceFlow && bconditionExpression && qName.equalsIgnoreCase("bpmn2:conditionExpression")) {
String svalue = characterStream.toString();
logger.fine("conditional SequenceFlow:" + bpmnID + "=" + svalue);
bconditionExpression = false;
conditionCache.put(bpmnID, svalue);
}
}
|
#vulnerable code
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
// end of bpmn2:process
if (qName.equalsIgnoreCase("bpmn2:process")) {
if (currentWorkflowGroup != null) {
currentWorkflowGroup = null;
}
}
// end of bpmn2:task -
if (bImixsTask && qName.equalsIgnoreCase("bpmn2:task")) {
bImixsTask = false;
taskCache.put(bpmnID, currentEntity);
}
if (qName.equalsIgnoreCase("bpmn2:extensionElements")) {
bExtensionElements = false;
}
// end of bpmn2:intermediateCatchEvent -
if (bImixsEvent && (qName.equalsIgnoreCase("bpmn2:intermediateCatchEvent")
|| qName.equalsIgnoreCase("bpmn2:intermediateThrowEvent"))) {
bImixsEvent = false;
// we need to cache the activities because the sequenceflows must be
// analysed later
eventCache.put(bpmnID, currentEntity);
}
/*
* End of a imixs:value
*/
if (qName.equalsIgnoreCase("imixs:value")) {
if (bExtensionElements && bItemValue && currentEntity != null && characterStream != null) {
String svalue = characterStream.toString();
List valueList = currentEntity.getItemValue(currentItemName);
if ("xs:boolean".equals(currentItemType.toLowerCase())) {
valueList.add(Boolean.valueOf(svalue));
} else if ("xs:integer".equals(currentItemType.toLowerCase())) {
valueList.add(Integer.valueOf(svalue));
} else {
valueList.add(svalue);
}
// item will only be added if it is not listed in the ignoreItem
// List!
if (!ignoreItemList.contains(currentItemName)) {
currentEntity.replaceItemValue(currentItemName, valueList);
}
}
bItemValue = false;
characterStream = null;
}
if (qName.equalsIgnoreCase("bpmn2:documentation")) {
if (currentEntity != null) {
currentEntity.replaceItemValue("rtfdescription", characterStream.toString());
}
// bpmn2:message?
if (bMessage) {
// cache the message...
messageCache.put(currentMessageName, characterStream.toString());
bMessage = false;
}
// bpmn2:annotation?
if (bAnnotation) {
// cache the annotation
annotationCache.put(currentAnnotationName, characterStream.toString());
bAnnotation = false;
}
characterStream = null;
bdocumentation = false;
}
// end of bpmn2:intermediateThrowEvent -
if (bLinkThrowEvent && !bLinkCatchEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) {
bLinkThrowEvent = false;
// we need to cache the link name
linkThrowEventCache.put(bpmnID, currentLinkName);
}
// end of bpmn2:intermediateCatchEvent -
if (bLinkCatchEvent && !bLinkThrowEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) {
bLinkCatchEvent = false;
// we need to cache the link name
linkCatchEventCache.put(currentLinkName, bpmnID);
}
}
#location 73
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void removeDocument(String uniqueID) throws LuceneException {
IndexWriter awriter = null;
long ltime = System.currentTimeMillis();
try {
awriter = createIndexWriter();
Term term = new Term("$uniqueid", uniqueID);
awriter.deleteDocuments(term);
} catch (CorruptIndexException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
} catch (LockObtainFailedException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
} catch (IOException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
}
finally {
// close writer!
if (awriter != null) {
logger.fine("lucene close IndexWriter...");
try {
awriter.close();
} catch (CorruptIndexException e) {
throw new LuceneException(INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
} catch (IOException e) {
throw new LuceneException(INVALID_INDEX, "Unable to close lucene IndexWriter: ", e);
}
}
}
logger.fine("lucene removeDocument in " + (System.currentTimeMillis() - ltime) + " ms");
}
|
#vulnerable code
public void removeDocument(String uniqueID) throws LuceneException {
IndexWriter awriter = null;
try {
awriter = createIndexWriter();
Term term = new Term("$uniqueid", uniqueID);
awriter.deleteDocuments(term);
} catch (CorruptIndexException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
} catch (LockObtainFailedException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
} catch (IOException e) {
throw new LuceneException(INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index",
e);
}
}
#location 6
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
workflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
workflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
WorkflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
WorkflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
#location 13
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
@Category(org.imixs.workflow.ItemCollection.class)
public void testFileData() {
ItemCollection itemColSource = new ItemCollection();
// add a dummy file
byte[] empty = { 0 };
itemColSource.addFileData(new FileData( "test1.txt", empty,"application/xml",null));
ItemCollection itemColTarget = new ItemCollection();
itemColTarget.addFileData(itemColSource.getFileData("test1.txt"));
FileData filedata = itemColTarget.getFileData("test1.txt");
Assert.assertNotNull(filedata);
Assert.assertEquals("test1.txt", filedata.getName());
Assert.assertEquals("application/xml", filedata.getContentType());
// test the byte content of itemColSource
byte[] file1Data1 =itemColSource.getFileData("test1.txt").getContent();
// we expect the new dummy array { 1, 2, 3 }
Assert.assertArrayEquals(empty, file1Data1);
// test the byte content of itemColTarget
file1Data1 = itemColTarget.getFileData("test1.txt").getContent();
// we expect the new dummy array { 1, 2, 3 }
Assert.assertArrayEquals(empty, file1Data1);
}
|
#vulnerable code
@Test
@Category(org.imixs.workflow.ItemCollection.class)
public void testFileData() {
ItemCollection itemColSource = new ItemCollection();
// add a dummy file
byte[] empty = { 0 };
itemColSource.addFile(empty, "test1.txt", "application/xml");
ItemCollection itemColTarget = new ItemCollection();
itemColTarget.addFileData(itemColSource.getFileData("test1.txt"));
FileData filedata = itemColTarget.getFileData("test1.txt");
Assert.assertNotNull(filedata);
Assert.assertEquals("test1.txt", filedata.getName());
Assert.assertEquals("application/xml", filedata.getContentType());
// test the byte content of itemColSource
Map<String, List<Object>> conedFiles1 = itemColSource.getFiles();
List<Object> fileContent1 = conedFiles1.get("test1.txt");
byte[] file1Data1 = (byte[]) fileContent1.get(1);
// we expect the new dummy array { 1, 2, 3 }
Assert.assertArrayEquals(empty, file1Data1);
// test the byte content of itemColTarget
conedFiles1 = itemColTarget.getFiles();
fileContent1 = conedFiles1.get("test1.txt");
file1Data1 = (byte[]) fileContent1.get(1);
// we expect the new dummy array { 1, 2, 3 }
Assert.assertArrayEquals(empty, file1Data1);
}
#location 21
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testParseResult() {
List<ItemCollection> result=null;
String testString = "{\n" +
" \"responseHeader\":{\n" +
" \"status\":0,\n" +
" \"QTime\":4,\n" +
" \"params\":{\n" +
" \"q\":\"*:*\",\n" +
" \"_\":\"1567286252995\"}},\n" +
" \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" +
" {\n" +
" \"type\":[\"model\"],\n" +
" \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" +
" \"_modified\":[20190831211617],\n" +
" \"_created\":[20190831211617],\n" +
" \"_version_\":1643418672068296704},\n" +
" {\n" +
" \"type\":[\"adminp\"],\n" +
" \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" +
" \"_modified\":[20190831211618],\n" +
" \"_created\":[20190831211618],\n" +
" \"_version_\":1643418672172105728}]\n" +
" }}";
result=solrSearchService.parseQueryResult(testString);
Assert.assertEquals(2,result.size());
ItemCollection document=null;
document=result.get(0);
Assert.assertEquals("model", document.getItemValueString("type"));
Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getItemValueString("id"));
Assert.assertEquals(1567278977000l, document.getItemValueDate("_modified").getTime());
Assert.assertEquals(1567278977000l, document.getItemValueDate("_created").getTime());
document=result.get(1);
Assert.assertEquals("adminp", document.getItemValueString("type"));
Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457",document.getItemValueString("id"));
Assert.assertEquals(1567278978000l, document.getItemValueDate("_created").getTime());
}
|
#vulnerable code
@Test
public void testParseResult() {
List<ItemCollection> result=null;
String testString = "{\n" +
" \"responseHeader\":{\n" +
" \"status\":0,\n" +
" \"QTime\":4,\n" +
" \"params\":{\n" +
" \"q\":\"*:*\",\n" +
" \"_\":\"1567286252995\"}},\n" +
" \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" +
" {\n" +
" \"type\":[\"model\"],\n" +
" \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" +
" \"_modified\":[20190831211617],\n" +
" \"_created\":[20190831211617],\n" +
" \"_version_\":1643418672068296704},\n" +
" {\n" +
" \"type\":[\"adminp\"],\n" +
" \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" +
" \"_modified\":[20190831211618],\n" +
" \"_created\":[20190831211618],\n" +
" \"_version_\":1643418672172105728}]\n" +
" }}";
result=solrSearchService.parseQueryResult(testString);
Assert.assertEquals(2,result.size());
ItemCollection document=null;
document=result.get(0);
Assert.assertEquals("model", document.getItemValueString("type"));
Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getUniqueID());
Assert.assertEquals(1567278977000l, document.getItemValueDate("$modified").getTime());
Assert.assertEquals(1567278977000l, document.getItemValueDate("$created").getTime());
document=result.get(1);
Assert.assertEquals("adminp", document.getItemValueString("type"));
Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457", document.getUniqueID());
Assert.assertEquals(1567278978000l, document.getItemValueDate("$created").getTime());
}
#location 36
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public ItemCollection load(String id) {
long lLoadTime = System.currentTimeMillis();
Document persistedDocument = null;
if (id == null || id.isEmpty()) {
return null;
}
persistedDocument = manager.find(Document.class, id);
// create instance of ItemCollection
if (persistedDocument != null && isCallerReader(persistedDocument)) {
ItemCollection result = null;// new ItemCollection();
if (persistedDocument.isPending()) {
// we clone but do not detach
logger.finest("......clone manged entity '" + id + "' pending status=" + persistedDocument.isPending());
result = new ItemCollection(persistedDocument.getData());
} else {
// the document is not managed, so we detach it
result = new ItemCollection();
result.setAllItems(persistedDocument.getData());
manager.detach(persistedDocument);
}
// if disable Optimistic Locking is TRUE we do not add the version
// number
if (disableOptimisticLocking) {
result.removeItem("$Version");
} else {
result.replaceItemValue("$Version", persistedDocument.getVersion());
}
// update the $isauthor flag
result.replaceItemValue("$isauthor", isCallerAuthor(persistedDocument));
// fire event
if (documentEvents != null) {
documentEvents.fire(new DocumentEvent(result, DocumentEvent.ON_DOCUMENT_LOAD));
} else {
logger.warning("Missing CDI support for Event<DocumentEvent> !");
}
logger.fine(
"...'" + result.getUniqueID() + "' loaded in " + (System.currentTimeMillis() - lLoadTime) + "ms");
return result;
} else
return null;
}
|
#vulnerable code
public ItemCollection load(String id) {
long lLoadTime = System.currentTimeMillis();
Document persistedDocument = null;
if (id==null || id.isEmpty()) {
return null;
}
persistedDocument = manager.find(Document.class, id);
// create instance of ItemCollection
if (persistedDocument != null && isCallerReader(persistedDocument)) {
ItemCollection result = null;// new ItemCollection();
if (persistedDocument.isPending()) {
// we clone but do not detach
logger.finest("......clone manged entity '" + id + "' pending status=" + persistedDocument.isPending());
result = new ItemCollection(persistedDocument.getData());
} else {
// the document is not managed, so we detach it
result = new ItemCollection();
result.setAllItems(persistedDocument.getData());
manager.detach(persistedDocument);
}
// if disable Optimistic Locking is TRUE we do not add the version
// number
if (disableOptimisticLocking) {
result.removeItem("$Version");
} else {
result.replaceItemValue("$Version", persistedDocument.getVersion());
}
// update the $isauthor flag
result.replaceItemValue("$isauthor", isCallerAuthor(persistedDocument));
// fire event
if (events != null) {
events.fire(new DocumentEvent(result, DocumentEvent.ON_DOCUMENT_LOAD));
} else {
logger.warning("Missing CDI support for Event<DocumentEvent> !");
}
logger.fine(
"...'" + result.getUniqueID() + "' loaded in " + (System.currentTimeMillis() - lLoadTime) + "ms");
return result;
} else
return null;
}
#location 43
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
workflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
workflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
WorkflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
WorkflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
#location 20
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings("unchecked")
public void replaceAllItems(Map<String, List<Object>> map) {
if (map == null) {
return;
}
// make a deep copy of the map
Map<String, List<Object>> clonedMap = (Map<String, List<Object>>) deepCopyOfMap(map);
if (clonedMap != null) {
Iterator<?> it = clonedMap.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, List<Object>> entry = (Map.Entry<String, List<Object>>) it.next();
replaceItemValue(entry.getKey().toString(), entry.getValue());
}
}
}
|
#vulnerable code
@SuppressWarnings("unchecked")
public void replaceAllItems(Map<String, List<Object>> map) {
// make a deep copy of the map
Map<String, List<Object>> clonedMap = (Map<String, List<Object>>) deepCopyOfMap(map);
Iterator<?> it = clonedMap.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, List<Object>> entry = (Map.Entry<String, List<Object>>) it.next();
replaceItemValue(entry.getKey().toString(), entry.getValue());
}
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testAddWorkdaysFromSunday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testAddWorkdaysFromSunday() {
Calendar startDate = Calendar.getInstance();
// adjust to SATURDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -1 Workdays -> TUESDAY
Assert.assertEquals(Calendar.TUESDAY,
WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK));
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
workflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
workflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
workflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
workflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
|
#vulnerable code
@Test
public void testMinusWorkdaysFromFriday() {
Calendar startDate = Calendar.getInstance();
// adjust to FRIDAY
startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);
System.out.println("Startdate=" + startDate.getTime());
// adjust -3 Workdays -> THUSEDAY
Assert.assertEquals(Calendar.THURSDAY,
WorkflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.WEDNESDAY,
WorkflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK));
// friday - 5
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.MONDAY,
WorkflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK));
Assert.assertEquals(Calendar.FRIDAY,
WorkflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK));
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.