Dataset columns:
instruction (string, 1 distinct value): the patch-generation prompt, repeated verbatim for every row
output (string, 64 to 69.4k characters): the patched method, prefixed with "#fixed code"
input (string, 205 to 32.4k characters): the vulnerable method, prefixed with "#vulnerable code", followed by the flagged line number ("#location") and the vulnerability type
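Since the rest of this dump keeps one field per line, a minimal sketch of how a single row could be modeled when loading it may help; the record name, the accessor, and the parsing logic below are illustrative assumptions, not part of the dataset.

// Illustrative model of one row; only the three field names come from the schema above.
record VulnerabilityPatchSample(String instruction, String output, String input) {

    // Convenience accessor: pull the "#vulnerability type" suffix out of the input field.
    String vulnerabilityType() {
        int idx = input().lastIndexOf("#vulnerability type");
        return idx < 0 ? "" : input().substring(idx + "#vulnerability type".length()).trim();
    }
}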
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void initEdgeEnvsFwd() { // TODO(user): Revisit what we throw away after the bwd analysis DiGraphNode<Node, ControlFlowGraph.Branch> entry = cfg.getEntry(); DiGraphEdge<Node, ControlFlowGraph.Branch> entryOutEdge = cfg.getOutEdges(entry.getValue()).get(0); TypeEnv entryEnv = envs.get(entryOutEdge); initEdgeEnvs(new TypeEnv()); // For function scopes, add the formal parameters and the free variables // from outer scopes to the environment. if (currentScope.isFunction()) { Set<String> formalsAndOuters = currentScope.getOuterVars(); formalsAndOuters.addAll(currentScope.getFormals()); if (currentScope.hasThis()) { formalsAndOuters.add("this"); } for (String name : formalsAndOuters) { JSType declType = currentScope.getDeclaredTypeOf(name); JSType initType; if (declType == null) { initType = envGetType(entryEnv, name); } else if (declType.getFunTypeIfSingletonObj() != null && declType.getFunTypeIfSingletonObj().isConstructor()) { initType = declType.getFunTypeIfSingletonObj().createConstructorObject(); } else { initType = declType; } entryEnv = envPutType(entryEnv, name, initType.withLocation(name)); } entryEnv = envPutType(entryEnv, RETVAL_ID, JSType.UNDEFINED); } // For all scopes, add local variables and (local) function definitions // to the environment. for (String local : currentScope.getLocals()) { entryEnv = envPutType(entryEnv, local, JSType.UNDEFINED); } for (String fnName : currentScope.getLocalFunDefs()) { JSType summaryType = summaries.get(currentScope.getScope(fnName)); FunctionType fnType = summaryType.getFunType(); if (fnType.isConstructor()) { summaryType = fnType.createConstructorObject(); } else { summaryType = summaryType.withProperty( new QualifiedName("prototype"), JSType.TOP_OBJECT); } entryEnv = envPutType(entryEnv, fnName, summaryType); } println("Keeping env: ", entryEnv); envs.put(entryOutEdge, entryEnv); }
#vulnerable code private void initEdgeEnvsFwd() { // TODO(user): Revisit what we throw away after the bwd analysis DiGraphNode<Node, ControlFlowGraph.Branch> entry = cfg.getEntry(); DiGraphEdge<Node, ControlFlowGraph.Branch> entryOutEdge = cfg.getOutEdges(entry.getValue()).get(0); TypeEnv entryEnv = envs.get(entryOutEdge); initEdgeEnvs(new TypeEnv()); // For function scopes, add the formal parameters and the free variables // from outer scopes to the environment. if (currentScope.isFunction()) { Set<String> formalsAndOuters = currentScope.getOuterVars(); formalsAndOuters.addAll(currentScope.getFormals()); if (currentScope.hasThis()) { formalsAndOuters.add("this"); } for (String name : formalsAndOuters) { JSType declType = currentScope.getDeclaredTypeOf(name); JSType initType; if (declType == null) { initType = envGetType(entryEnv, name); } else if (declType.getFunTypeIfSingletonObj() != null && declType.getFunTypeIfSingletonObj().isConstructor()) { initType = declType.getFunTypeIfSingletonObj().createConstructorObject(); } else { initType = declType; } entryEnv = envPutType(entryEnv, name, initType.withLocation(name)); } entryEnv = envPutType(entryEnv, RETVAL_ID, JSType.UNDEFINED); } // For all scopes, add local variables and (local) function definitions // to the environment. for (String local : currentScope.getLocals()) { entryEnv = envPutType(entryEnv, local, JSType.UNDEFINED); } for (String fnName : currentScope.getLocalFunDefs()) { JSType summaryType = summaries.get(currentScope.getScope(fnName)); FunctionType fnType = summaryType.getFunType(); if (fnType.isConstructor()) { summaryType = fnType.createConstructorObject(); } else { summaryType = summaryType.withProperty("prototype", JSType.TOP_OBJECT); } entryEnv = envPutType(entryEnv, fnName, summaryType); } println("Keeping env: ", entryEnv); envs.put(entryOutEdge, entryEnv); } #location 29 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void removeUnreferencedVars() { CodingConvention convention = codingConvention; for (Iterator<Var> it = maybeUnreferenced.iterator(); it.hasNext(); ) { Var var = it.next(); // Remove calls to inheritance-defining functions where the unreferenced // class is the subclass. for (Node exprCallNode : inheritsCalls.get(var)) { NodeUtil.removeChild(exprCallNode.getParent(), exprCallNode); compiler.reportCodeChange(); } // Regardless of what happens to the original declaration, // we need to remove all assigns, because they may contain references // to other unreferenced variables. removeAllAssigns(var); compiler.addToDebugLog("Unreferenced var: " + var.name); Node nameNode = var.nameNode; Node toRemove = nameNode.getParent(); Node parent = toRemove.getParent(); Preconditions.checkState( toRemove.getType() == Token.VAR || toRemove.getType() == Token.FUNCTION || toRemove.getType() == Token.LP && parent.getType() == Token.FUNCTION, "We should only declare vars and functions and function args"); if (toRemove.getType() == Token.LP && parent.getType() == Token.FUNCTION) { // Don't remove function arguments here. That's a special case // that's taken care of in removeUnreferencedFunctionArgs. } else if (NodeUtil.isFunctionExpression(toRemove)) { if (!preserveFunctionExpressionNames) { toRemove.getFirstChild().setString(""); compiler.reportCodeChange(); } // Don't remove bleeding functions. } else if (parent != null && parent.getType() == Token.FOR && parent.getChildCount() < 4) { // foreach iterations have 3 children. Leave them alone. } else if (toRemove.getType() == Token.VAR && nameNode.hasChildren() && NodeUtil.mayHaveSideEffects(nameNode.getFirstChild())) { // If this is a single var declaration, we can at least remove the // declaration itself and just leave the value, e.g., // var a = foo(); => foo(); if (toRemove.getChildCount() == 1) { parent.replaceChild(toRemove, new Node(Token.EXPR_RESULT, nameNode.removeFirstChild())); compiler.reportCodeChange(); } } else if (toRemove.getType() == Token.VAR && toRemove.getChildCount() > 1) { // For var declarations with multiple names (i.e. var a, b, c), // only remove the unreferenced name toRemove.removeChild(nameNode); compiler.reportCodeChange(); } else if (parent != null) { NodeUtil.removeChild(parent, toRemove); compiler.reportCodeChange(); } } }
#vulnerable code private void removeUnreferencedVars() { CodingConvention convention = compiler.getCodingConvention(); for (Iterator<Var> it = maybeUnreferenced.iterator(); it.hasNext(); ) { Var var = it.next(); // Regardless of what happens to the original declaration, // we need to remove all assigns, because they may contain references // to other unreferenced variables. removeAllAssigns(var); compiler.addToDebugLog("Unreferenced var: " + var.name); Node nameNode = var.nameNode; Node toRemove = nameNode.getParent(); Node parent = toRemove.getParent(); Preconditions.checkState( toRemove.getType() == Token.VAR || toRemove.getType() == Token.FUNCTION || toRemove.getType() == Token.LP && parent.getType() == Token.FUNCTION, "We should only declare vars and functions and function args"); if (toRemove.getType() == Token.LP && parent.getType() == Token.FUNCTION) { // Don't remove function arguments here. That's a special case // that's taken care of in removeUnreferencedFunctionArgs. } else if (NodeUtil.isFunctionExpression(toRemove)) { if (!preserveFunctionExpressionNames) { toRemove.getFirstChild().setString(""); compiler.reportCodeChange(); } // Don't remove bleeding functions. } else if (parent != null && parent.getType() == Token.FOR && parent.getChildCount() < 4) { // foreach iterations have 3 children. Leave them alone. } else if (toRemove.getType() == Token.VAR && nameNode.hasChildren() && NodeUtil.mayHaveSideEffects(nameNode.getFirstChild())) { // If this is a single var declaration, we can at least remove the // declaration itself and just leave the value, e.g., // var a = foo(); => foo(); if (toRemove.getChildCount() == 1) { parent.replaceChild(toRemove, new Node(Token.EXPR_RESULT, nameNode.removeFirstChild())); compiler.reportCodeChange(); } } else if (toRemove.getType() == Token.VAR && toRemove.getChildCount() > 1) { // For var declarations with multiple names (i.e. var a, b, c), // only remove the unreferenced name toRemove.removeChild(nameNode); compiler.reportCodeChange(); } else if (parent != null) { NodeUtil.removeChild(parent, toRemove); compiler.reportCodeChange(); } } } #location 45 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void outputTracerReport() { JvmMetrics.maybeWriteJvmMetrics(this.err, "verbose:pretty:all"); OutputStreamWriter output = new OutputStreamWriter(this.err); try { int runtime = 0; int runs = 0; int changes = 0; int diff = 0; int gzDiff = 0; // header output.write("Summary:\n"); output.write("pass,runtime,runs,chancingRuns,reduction,gzReduction\n"); Map<String, Stats> runtimeMap = compiler.tracker.getStats(); for (Entry<String, Stats> entry : runtimeMap.entrySet()) { String key = entry.getKey(); Stats stats = entry.getValue(); output.write(key); output.write(","); output.write(String.valueOf(stats.runtime)); runtime += stats.runtime; output.write(","); output.write(String.valueOf(stats.runs)); runs += stats.runs; output.write(","); output.write(String.valueOf(stats.changes)); changes += stats.changes; output.write(","); output.write(String.valueOf(stats.diff)); diff += stats.diff; output.write(","); output.write(String.valueOf(stats.gzDiff)); gzDiff += stats.gzDiff; output.write("\n"); } output.write("TOTAL"); output.write(","); output.write(String.valueOf(runtime)); output.write(","); output.write(String.valueOf(runs)); output.write(","); output.write(String.valueOf(changes)); output.write(","); output.write(String.valueOf(diff)); output.write(","); output.write(String.valueOf(gzDiff)); output.write("\n"); output.write("\n"); output.write("Log:\n"); output.write( "pass,runtime,runs,chancingRuns,reduction,gzReduction,size,gzSize\n"); List<Stats> runtimeLog = compiler.tracker.getLog(); for (Stats stats : runtimeLog) { output.write(stats.pass); output.write(","); output.write(String.valueOf(stats.runtime)); output.write(","); output.write(String.valueOf(stats.runs)); output.write(","); output.write(String.valueOf(stats.changes)); output.write(","); output.write(String.valueOf(stats.diff)); output.write(","); output.write(String.valueOf(stats.gzDiff)); output.write(","); output.write(String.valueOf(stats.size)); output.write(","); output.write(String.valueOf(stats.gzSize)); output.write("\n"); } output.write("\n"); output.close(); } catch (IOException e) { e.printStackTrace(); } }
#vulnerable code private void outputTracerReport() { OutputStreamWriter output = new OutputStreamWriter(this.err); try { int runtime = 0; int runs = 0; int changes = 0; int diff = 0; int gzDiff = 0; // header output.write("Summary:\n"); output.write("pass,runtime,runs,chancingRuns,reduction,gzReduction\n"); Map<String, Stats> runtimeMap = compiler.tracker.getStats(); for (Entry<String, Stats> entry : runtimeMap.entrySet()) { String key = entry.getKey(); Stats stats = entry.getValue(); output.write(key); output.write(","); output.write(String.valueOf(stats.runtime)); runtime += stats.runtime; output.write(","); output.write(String.valueOf(stats.runs)); runs += stats.runs; output.write(","); output.write(String.valueOf(stats.changes)); changes += stats.changes; output.write(","); output.write(String.valueOf(stats.diff)); diff += stats.diff; output.write(","); output.write(String.valueOf(stats.gzDiff)); gzDiff += stats.gzDiff; output.write("\n"); } output.write("TOTAL"); output.write(","); output.write(String.valueOf(runtime)); output.write(","); output.write(String.valueOf(runs)); output.write(","); output.write(String.valueOf(changes)); output.write(","); output.write(String.valueOf(diff)); output.write(","); output.write(String.valueOf(gzDiff)); output.write("\n"); output.write("\n"); output.write("Log:\n"); output.write( "pass,runtime,runs,chancingRuns,reduction,gzReduction,size,gzSize\n"); List<Stats> runtimeLog = compiler.tracker.getLog(); for (Stats stats : runtimeLog) { output.write(stats.pass); output.write(","); output.write(String.valueOf(stats.runtime)); output.write(","); output.write(String.valueOf(stats.runs)); output.write(","); output.write(String.valueOf(stats.changes)); output.write(","); output.write(String.valueOf(stats.diff)); output.write(","); output.write(String.valueOf(stats.gzDiff)); output.write(","); output.write(String.valueOf(stats.size)); output.write(","); output.write(String.valueOf(stats.gzSize)); output.write("\n"); } output.write("\n"); output.close(); } catch (IOException e) { e.printStackTrace(); } } #location 74 #vulnerability type RESOURCE_LEAK
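The leak flagged in this row is a writer that is only closed on the success path; if any write throws, close() is never reached. A minimal sketch of the usual remedy with try-with-resources follows; the class, method, and CSV content are hypothetical stand-ins for the tracker report, not the compiler's actual API.

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

class TracerReport {
    // The writer is closed automatically even when a write throws mid-report.
    static void writeSummary(OutputStream err, String csvBody) {
        try (Writer out = new OutputStreamWriter(err, StandardCharsets.UTF_8)) {
            out.write("Summary:\n");
            out.write(csvBody);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}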
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void escapeParameters(MustDef output) { for (Iterator<Var> i = jsScope.getVars(); i.hasNext();) { Var v = i.next(); if (isParameter(v)) { // Assume we no longer know where the parameter comes from // anymore. output.reachingDef.put(v, null); } } // Also, assume we no longer know anything that depends on a parameter. for (Entry<Var, Definition> pair: output.reachingDef.entrySet()) { Definition value = pair.getValue(); if (value == null) { continue; } for (Var dep : value.depends) { if (isParameter(dep)) { output.reachingDef.put(pair.getKey(), null); } } } }
#vulnerable code private void escapeParameters(MustDef output) { for (Iterator<Var> i = jsScope.getVars(); i.hasNext();) { Var v = i.next(); if (v.getParentNode().getType() == Token.LP) { // Assume we no longer know where the parameter comes from // anymore. output.reachingDef.put(v, null); } } } #location 4 #vulnerability type NULL_DEREFERENCE
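One part of the patch here swaps a chained call on a possibly-null parent node for an isParameter(v) helper, so the null check lives in one place. A small sketch of that shape; Node, Var, and the token value are simplified stand-ins for the compiler's classes, not the real API.

// Hypothetical, stripped-down versions of the compiler's Node and Var types.
final class Node { int type; }

final class Var {
    Node parentNode; // may be null for synthesized variables
    Node getParentNode() { return parentNode; }
}

final class Params {
    static final int TOKEN_LP = 90; // illustrative token id

    // Null-safe predicate: callers never dereference the parent node directly.
    static boolean isParameter(Var v) {
        Node parent = v.getParentNode();
        return parent != null && parent.type == TOKEN_LP;
    }
}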
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void createPropertyScopeFor(Symbol s) { // In order to build a property scope for s, we will need to build // a property scope for all its implicit prototypes first. This means // that sometimes we will already have built its property scope // for a previous symbol. if (s.propertyScope != null) { return; } SymbolScope parentPropertyScope = null; ObjectType type = s.getType() == null ? null : s.getType().toObjectType(); if (type == null) { return; } ObjectType proto = type.getParentScope(); if (proto != null && proto != type && proto.getConstructor() != null) { Symbol parentSymbol = getSymbolForInstancesOf(proto.getConstructor()); if (parentSymbol != null) { createPropertyScopeFor(parentSymbol); parentPropertyScope = parentSymbol.getPropertyScope(); } } ObjectType instanceType = type; Iterable<String> propNames = type.getOwnPropertyNames(); if (instanceType.isFunctionPrototypeType()) { // Merge the properties of "Foo.prototype" and "new Foo()" together. instanceType = instanceType.getOwnerFunction().getInstanceType(); Set<String> set = Sets.newHashSet(propNames); Iterables.addAll(set, instanceType.getOwnPropertyNames()); propNames = set; } s.setPropertyScope(new SymbolScope(null, parentPropertyScope, type, s)); for (String propName : propNames) { StaticSlot<JSType> newProp = instanceType.getSlot(propName); if (newProp.getDeclaration() == null) { // Skip properties without declarations. We won't know how to index // them, because we index things by node. continue; } // We have symbol tables that do not do type analysis. They just try // to build a complete index of all objects in the program. So we might // already have symbols for things like "Foo.bar". If this happens, // throw out the old symbol and use the type-based symbol. Symbol oldProp = getScope(s).getSlot(s.getName() + "." + propName); if (oldProp != null) { removeSymbol(oldProp); } Symbol newSym = copySymbolTo(newProp, s.propertyScope); if (oldProp != null) { if (newSym.getJSDocInfo() == null) { newSym.setJSDocInfo(oldProp.getJSDocInfo()); } newSym.setPropertyScope(oldProp.propertyScope); for (Reference ref : oldProp.references.values()) { newSym.defineReferenceAt(ref.getNode()); } } } }
#vulnerable code private void createPropertyScopeFor(Symbol s) { // In order to build a property scope for s, we will need to build // a property scope for all its implicit prototypes first. This means // that sometimes we will already have built its property scope // for a previous symbol. if (s.propertyScope != null) { return; } SymbolScope parentPropertyScope = null; ObjectType type = s.getType().toObjectType(); ObjectType proto = type.getParentScope(); if (proto != null && proto != type && proto.getConstructor() != null) { Symbol parentSymbol = getSymbolForInstancesOf(proto.getConstructor()); if (parentSymbol != null) { createPropertyScopeFor(parentSymbol); parentPropertyScope = parentSymbol.getPropertyScope(); } } ObjectType instanceType = type; Iterable<String> propNames = type.getOwnPropertyNames(); if (instanceType.isFunctionPrototypeType()) { // Merge the properties of "Foo.prototype" and "new Foo()" together. instanceType = instanceType.getOwnerFunction().getInstanceType(); Set<String> set = Sets.newHashSet(propNames); Iterables.addAll(set, instanceType.getOwnPropertyNames()); propNames = set; } s.propertyScope = new SymbolScope(null, parentPropertyScope, type); for (String propName : propNames) { StaticSlot<JSType> newProp = instanceType.getSlot(propName); if (newProp.getDeclaration() == null) { // Skip properties without declarations. We won't know how to index // them, because we index things by node. continue; } // We have symbol tables that do not do type analysis. They just try // to build a complete index of all objects in the program. So we might // already have symbols for things like "Foo.bar". If this happens, // throw out the old symbol and use the type-based symbol. Symbol oldProp = getScope(s).getSlot(s.getName() + "." + propName); if (oldProp != null) { removeSymbol(oldProp); } Symbol newSym = copySymbolTo(newProp, s.propertyScope); if (oldProp != null) { if (newSym.getJSDocInfo() == null) { newSym.setJSDocInfo(oldProp.getJSDocInfo()); } newSym.propertyScope = oldProp.propertyScope; for (Reference ref : oldProp.references.values()) { newSym.defineReferenceAt(ref.getNode()); } } } } #location 12 #vulnerability type NULL_DEREFERENCE
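This fix is the guard-and-bail-out pattern: compute the possibly-null value once, and return early when it is absent instead of chaining further calls on it. A compact sketch of the shape; Symbol and ObjectType here are empty stubs, not the real type-system classes.

final class ObjectType { }

final class Symbol {
    private final ObjectType type; // null for symbols the type checker could not resolve
    Symbol(ObjectType type) { this.type = type; }
    ObjectType getType() { return type; }
}

final class PropertyScopes {
    static void createPropertyScopeFor(Symbol s) {
        ObjectType type = s.getType();
        if (type == null) {
            return; // nothing below may run against a missing type
        }
        // ... every later dereference of `type` is now safe ...
    }
}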
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private FlowScope traverse(Node n, FlowScope scope) { switch (n.getType()) { case Token.ASSIGN: scope = traverseAssign(n, scope); break; case Token.NAME: scope = traverseName(n, scope); break; case Token.GETPROP: scope = traverseGetProp(n, scope); break; case Token.AND: scope = traverseAnd(n, scope).getJoinedFlowScope() .createChildFlowScope(); break; case Token.OR: scope = traverseOr(n, scope).getJoinedFlowScope() .createChildFlowScope(); break; case Token.HOOK: scope = traverseHook(n, scope); break; case Token.OBJECTLIT: scope = traverseObjectLiteral(n, scope); break; case Token.CALL: scope = traverseCall(n, scope); break; case Token.NEW: scope = traverseNew(n, scope); break; case Token.ASSIGN_ADD: case Token.ADD: scope = traverseAdd(n, scope); break; case Token.POS: case Token.NEG: scope = traverse(n.getFirstChild(), scope); // Find types. n.setJSType(getNativeType(NUMBER_TYPE)); break; case Token.ARRAYLIT: scope = traverseArrayLiteral(n, scope); break; case Token.THIS: n.setJSType(scope.getTypeOfThis()); break; case Token.ASSIGN_LSH: case Token.ASSIGN_RSH: case Token.LSH: case Token.RSH: case Token.ASSIGN_URSH: case Token.URSH: case Token.ASSIGN_DIV: case Token.ASSIGN_MOD: case Token.ASSIGN_BITAND: case Token.ASSIGN_BITXOR: case Token.ASSIGN_BITOR: case Token.ASSIGN_MUL: case Token.ASSIGN_SUB: case Token.DIV: case Token.MOD: case Token.BITAND: case Token.BITXOR: case Token.BITOR: case Token.MUL: case Token.SUB: case Token.DEC: case Token.INC: case Token.BITNOT: scope = traverseChildren(n, scope); n.setJSType(getNativeType(NUMBER_TYPE)); break; case Token.PARAM_LIST: scope = traverse(n.getFirstChild(), scope); n.setJSType(getJSType(n.getFirstChild())); break; case Token.COMMA: scope = traverseChildren(n, scope); n.setJSType(getJSType(n.getLastChild())); break; case Token.TYPEOF: scope = traverseChildren(n, scope); n.setJSType(getNativeType(STRING_TYPE)); break; case Token.DELPROP: case Token.LT: case Token.LE: case Token.GT: case Token.GE: case Token.NOT: case Token.EQ: case Token.NE: case Token.SHEQ: case Token.SHNE: case Token.INSTANCEOF: case Token.IN: scope = traverseChildren(n, scope); n.setJSType(getNativeType(BOOLEAN_TYPE)); break; case Token.GETELEM: scope = traverseGetElem(n, scope); break; case Token.EXPR_RESULT: scope = traverseChildren(n, scope); if (n.getFirstChild().isGetProp()) { ensurePropertyDeclared(n.getFirstChild()); } break; case Token.SWITCH: scope = traverse(n.getFirstChild(), scope); break; case Token.RETURN: scope = traverseReturn(n, scope); break; case Token.VAR: case Token.THROW: scope = traverseChildren(n, scope); break; case Token.CATCH: scope = traverseCatch(n, scope); break; case Token.CAST: scope = traverseChildren(n, scope); JSDocInfo info = n.getJSDocInfo(); if (info != null && info.hasType()) { n.setJSType(info.getType().evaluate(syntacticScope, registry)); } break; } return scope; }
#vulnerable code private FlowScope traverse(Node n, FlowScope scope) { switch (n.getType()) { case Token.ASSIGN: scope = traverseAssign(n, scope); break; case Token.NAME: scope = traverseName(n, scope); break; case Token.GETPROP: scope = traverseGetProp(n, scope); break; case Token.AND: scope = traverseAnd(n, scope).getJoinedFlowScope() .createChildFlowScope(); break; case Token.OR: scope = traverseOr(n, scope).getJoinedFlowScope() .createChildFlowScope(); break; case Token.HOOK: scope = traverseHook(n, scope); break; case Token.OBJECTLIT: scope = traverseObjectLiteral(n, scope); break; case Token.CALL: scope = traverseCall(n, scope); break; case Token.NEW: scope = traverseNew(n, scope); break; case Token.ASSIGN_ADD: case Token.ADD: scope = traverseAdd(n, scope); break; case Token.POS: case Token.NEG: scope = traverse(n.getFirstChild(), scope); // Find types. n.setJSType(getNativeType(NUMBER_TYPE)); break; case Token.ARRAYLIT: scope = traverseArrayLiteral(n, scope); break; case Token.THIS: n.setJSType(scope.getTypeOfThis()); break; case Token.ASSIGN_LSH: case Token.ASSIGN_RSH: case Token.LSH: case Token.RSH: case Token.ASSIGN_URSH: case Token.URSH: case Token.ASSIGN_DIV: case Token.ASSIGN_MOD: case Token.ASSIGN_BITAND: case Token.ASSIGN_BITXOR: case Token.ASSIGN_BITOR: case Token.ASSIGN_MUL: case Token.ASSIGN_SUB: case Token.DIV: case Token.MOD: case Token.BITAND: case Token.BITXOR: case Token.BITOR: case Token.MUL: case Token.SUB: case Token.DEC: case Token.INC: case Token.BITNOT: scope = traverseChildren(n, scope); n.setJSType(getNativeType(NUMBER_TYPE)); break; case Token.PARAM_LIST: scope = traverse(n.getFirstChild(), scope); n.setJSType(getJSType(n.getFirstChild())); break; case Token.COMMA: scope = traverseChildren(n, scope); n.setJSType(getJSType(n.getLastChild())); break; case Token.TYPEOF: scope = traverseChildren(n, scope); n.setJSType(getNativeType(STRING_TYPE)); break; case Token.DELPROP: case Token.LT: case Token.LE: case Token.GT: case Token.GE: case Token.NOT: case Token.EQ: case Token.NE: case Token.SHEQ: case Token.SHNE: case Token.INSTANCEOF: case Token.IN: scope = traverseChildren(n, scope); n.setJSType(getNativeType(BOOLEAN_TYPE)); break; case Token.GETELEM: scope = traverseGetElem(n, scope); break; case Token.EXPR_RESULT: scope = traverseChildren(n, scope); if (n.getFirstChild().isGetProp()) { ensurePropertyDeclared(n.getFirstChild()); } break; case Token.SWITCH: scope = traverse(n.getFirstChild(), scope); break; case Token.RETURN: scope = traverseReturn(n, scope); break; case Token.VAR: case Token.THROW: scope = traverseChildren(n, scope); break; case Token.CATCH: scope = traverseCatch(n, scope); break; case Token.CAST: scope = traverseChildren(n, scope); break; } // TODO(johnlenz): remove this after the CAST node change has shaken out. if (!n.isFunction()) { JSDocInfo info = n.getJSDocInfo(); if (info != null && info.hasType()) { JSType castType = info.getType().evaluate(syntacticScope, registry); // A stubbed type declaration on a qualified name should take // effect for all subsequent accesses of that name, // so treat it the same as an assign to that name. if (n.isQualifiedName() && n.getParent().isExprResult()) { updateScopeForTypeChange(scope, n, n.getJSType(), castType); } n.setJSType(castType); } } return scope; } #location 155 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Node inlineReturnValue(Node callNode, Node fnNode) { Node block = fnNode.getLastChild(); Node callParentNode = callNode.getParent(); // NOTE: As the normalize pass guarantees globals aren't being // shadowed and an expression can't introduce new names, there is // no need to check for conflicts. // Create an argName -> expression map, checking for side effects. Map<String, Node> argMap = FunctionArgumentInjector.getFunctionCallParameterMap( fnNode, callNode, this.safeNameIdSupplier); Node newExpression; if (!block.hasChildren()) { Node srcLocation = block; newExpression = NodeUtil.newUndefinedNode(srcLocation); } else { Node returnNode = block.getFirstChild(); Preconditions.checkArgument(returnNode.getType() == Token.RETURN); // Clone the return node first. Node safeReturnNode = returnNode.cloneTree(); Node inlineResult = FunctionArgumentInjector.inject( null, safeReturnNode, null, argMap); Preconditions.checkArgument(safeReturnNode == inlineResult); newExpression = safeReturnNode.removeFirstChild(); } callParentNode.replaceChild(callNode, newExpression); return newExpression; }
#vulnerable code private Node inlineReturnValue(Node callNode, Node fnNode) { Node block = fnNode.getLastChild(); Node callParentNode = callNode.getParent(); // NOTE: As the normalize pass guarantees globals aren't being // shadowed and an expression can't introduce new names, there is // no need to check for conflicts. // Create an argName -> expression map, checking for side effects. Map<String, Node> argMap = FunctionArgumentInjector.getFunctionCallParameterMap( fnNode, callNode, this.safeNameIdSupplier); Node newExpression; if (!block.hasChildren()) { Node srcLocation = block; newExpression = NodeUtil.newUndefinedNode(srcLocation); } else { Node returnNode = block.getFirstChild(); Preconditions.checkArgument(returnNode.getType() == Token.RETURN); // Clone the return node first. Node safeReturnNode = returnNode.cloneTree(); Node inlineResult = FunctionArgumentInjector.inject( safeReturnNode, null, argMap); Preconditions.checkArgument(safeReturnNode == inlineResult); newExpression = safeReturnNode.removeFirstChild(); } callParentNode.replaceChild(callNode, newExpression); return newExpression; } #location 30 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private UndiGraph<Var, Void> computeVariableNamesInterferenceGraph( NodeTraversal t, ControlFlowGraph<Node> cfg, Set<Var> escaped) { UndiGraph<Var, Void> interferenceGraph = LinkedUndirectedGraph.create(); // For all variables V not in unsafeCrossRange, // LiveRangeChecker(V, X) and LiveRangeChecker(Y, V) will never add a edge // to the interferenceGraph. In other words, we don't need to use // LiveRangeChecker on variable pair (A, B) if both A and B are not // in the unsafeCrossRangeSet. See PrescreenCrossLiveRange for details. Set<Var> unsafeCrossRangeSet = Sets.newHashSet(); Scope scope = t.getScope(); for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue; } for (Iterator<Var> i = scope.getVars(); i.hasNext();) { final Var v = i.next(); if (!unsafeCrossRangeSet.contains(v)) { FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); PrescreenCrossLiveRange check = new PrescreenCrossLiveRange(v, state.getOut()); NodeTraversal.traverse(compiler, cfgNode.getValue(), check); if (!check.isSafe()) { unsafeCrossRangeSet.add(v); } } } } // First create a node for each non-escaped variable. for (Iterator<Var> i = scope.getVars(); i.hasNext();) { Var v = i.next(); if (!escaped.contains(v)) { // TODO(user): In theory, we CAN coalesce function names just like // any variables. Our Liveness analysis captures this just like it as // described in the specification. However, we saw some zipped and // and unzipped size increase after this. We are not totally sure why // that is but, for now, we will respect the dead functions and not play // around with it. if (!NodeUtil.isFunction(v.getParentNode())) { interferenceGraph.createNode(v); } } } // Go through each variable and try to connect them. for (Iterator<Var> i1 = scope.getVars(); i1.hasNext();) { Var v1 = i1.next(); NEXT_VAR_PAIR: for (Iterator<Var> i2 = scope.getVars(); i2.hasNext();) { Var v2 = i2.next(); // Skip duplicate pairs. if (v1.index >= v2.index) { continue; } if (!interferenceGraph.hasNode(v1) || !interferenceGraph.hasNode(v2)) { // Skip nodes that were not added. They are globals and escaped // locals. Also avoid merging a variable with itself. continue NEXT_VAR_PAIR; } if (v1.getParentNode().getType() == Token.LP && v2.getParentNode().getType() == Token.LP) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } // Go through every CFG node in the program and look at // this variable pair. If they are both live at the same // time, add an edge between them and continue to the next pair. NEXT_CROSS_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_CROSS_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); // Check the live states and add edge when possible. if ((state.getIn().isLive(v1) && state.getIn().isLive(v2)) || (state.getOut().isLive(v1) && state.getOut().isLive(v2))) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } } // v1 and v2 might not have an edge between them! woohoo. there's // one last sanity check that we have to do: we have to check // if there's a collision *within* the cfg node. if (!unsafeCrossRangeSet.contains(v1) && !unsafeCrossRangeSet.contains(v2)) { continue NEXT_VAR_PAIR; } NEXT_INTRA_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_INTRA_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); boolean v1OutLive = state.getOut().isLive(v1); boolean v2OutLive = state.getOut().isLive(v2); CombinedLiveRangeChecker checker = new CombinedLiveRangeChecker( new LiveRangeChecker(v1, v2OutLive ? null : v2), new LiveRangeChecker(v2, v1OutLive ? null : v1)); NodeTraversal.traverse( compiler, cfgNode.getValue(), checker); if (checker.connectIfCrossed(interferenceGraph)) { continue NEXT_VAR_PAIR; } } } } return interferenceGraph; }
#vulnerable code private UndiGraph<Var, Void> computeVariableNamesInterferenceGraph( NodeTraversal t, ControlFlowGraph<Node> cfg, Set<Var> escaped) { UndiGraph<Var, Void> interferenceGraph = LinkedUndirectedGraph.create(); Scope scope = t.getScope(); // First create a node for each non-escaped variable. for (Iterator<Var> i = scope.getVars(); i.hasNext();) { Var v = i.next(); if (!escaped.contains(v)) { // TODO(user): In theory, we CAN coalesce function names just like // any variables. Our Liveness analysis captures this just like it as // described in the specification. However, we saw some zipped and // and unzipped size increase after this. We are not totally sure why // that is but, for now, we will respect the dead functions and not play // around with it. if (!NodeUtil.isFunction(v.getParentNode())) { interferenceGraph.createNode(v); } } } // Go through each variable and try to connect them. for (Iterator<Var> i1 = scope.getVars(); i1.hasNext();) { Var v1 = i1.next(); NEXT_VAR_PAIR: for (Iterator<Var> i2 = scope.getVars(); i2.hasNext();) { Var v2 = i2.next(); // Skip duplicate pairs. if (v1.index >= v2.index) { continue; } if (!interferenceGraph.hasNode(v1) || !interferenceGraph.hasNode(v2)) { // Skip nodes that were not added. They are globals and escaped // locals. Also avoid merging a variable with itself. continue NEXT_VAR_PAIR; } if (v1.getParentNode().getType() == Token.LP && v2.getParentNode().getType() == Token.LP) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } // Go through every CFG node in the program and look at // this variable pair. If they are both live at the same // time, add an edge between them and continue to the next pair. NEXT_CROSS_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_CROSS_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); // Check the live states and add edge when possible. if ((state.getIn().isLive(v1) && state.getIn().isLive(v2)) || (state.getOut().isLive(v1) && state.getOut().isLive(v2))) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } } // v1 and v2 might not have an edge between them! woohoo. there's // one last sanity check that we have to do: we have to check // if there's a collision *within* the cfg node. NEXT_INTRA_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_INTRA_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); boolean v1OutLive = state.getOut().isLive(v1); boolean v2OutLive = state.getOut().isLive(v2); CombinedLiveRangeChecker checker = new CombinedLiveRangeChecker( new LiveRangeChecker(v1, v2OutLive ? null : v2), new LiveRangeChecker(v2, v1OutLive ? null : v1)); NodeTraversal.traverse( compiler, cfgNode.getValue(), checker); if (checker.connectIfCrossed(interferenceGraph)) { continue NEXT_VAR_PAIR; } } } } return interferenceGraph; } #location 18 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public Symbol getSymbolForInstancesOf(FunctionType fn) { Preconditions.checkState(fn.isConstructor() || fn.isInterface()); ObjectType pType = fn.getPrototype(); return getSymbolForName(fn.getSource(), pType.getReferenceName()); }
#vulnerable code public Symbol getSymbolForInstancesOf(FunctionType fn) { Preconditions.checkState(fn.isConstructor() || fn.isInterface()); ObjectType pType = fn.getPrototype(); String name = pType.getReferenceName(); if (name == null || globalScope == null) { return null; } Node source = fn.getSource(); return (source == null ? globalScope : getEnclosingScope(source)).getSlot(name); } #location 11 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static List<JSSourceFile> getDefaultExterns() throws IOException { InputStream input = CommandLineRunner.class.getResourceAsStream( "/externs.zip"); ZipInputStream zip = new ZipInputStream(input); Map<String, JSSourceFile> externsMap = Maps.newHashMap(); for (ZipEntry entry = null; (entry = zip.getNextEntry()) != null; ) { BufferedInputStream entryStream = new BufferedInputStream( new LimitInputStream(zip, entry.getSize())); externsMap.put(entry.getName(), JSSourceFile.fromInputStream( // Give the files an odd prefix, so that they do not conflict // with the user's files. "externs.zip//" + entry.getName(), entryStream)); } Preconditions.checkState( externsMap.keySet().equals(Sets.newHashSet(DEFAULT_EXTERNS_NAMES)), "Externs zip must match our hard-coded list of externs."); // Order matters, so the resources must be added to the result list // in the expected order. List<JSSourceFile> externs = Lists.newArrayList(); for (String key : DEFAULT_EXTERNS_NAMES) { externs.add(externsMap.get(key)); } return externs; }
#vulnerable code public static List<JSSourceFile> getDefaultExterns() throws IOException { InputStream input = CommandLineRunner.class.getResourceAsStream( "/externs.zip"); ZipInputStream zip = new ZipInputStream(input); Map<String, JSSourceFile> externsMap = Maps.newHashMap(); for (ZipEntry entry = null; (entry = zip.getNextEntry()) != null; ) { LimitInputStream entryStream = new LimitInputStream(zip, entry.getSize()); externsMap.put(entry.getName(), JSSourceFile.fromInputStream( // Give the files an odd prefix, so that they do not conflict // with the user's files. "externs.zip//" + entry.getName(), entryStream)); } Preconditions.checkState( externsMap.keySet().equals(Sets.newHashSet(DEFAULT_EXTERNS_NAMES)), "Externs zip must match our hard-coded list of externs."); // Order matters, so the resources must be added to the result list // in the expected order. List<JSSourceFile> externs = Lists.newArrayList(); for (String key : DEFAULT_EXTERNS_NAMES) { externs.add(externsMap.get(key)); } return externs; } #location 9 #vulnerability type RESOURCE_LEAK
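Both versions of this row leave the zip stream (and the stream it wraps) open. The usual remedy is to scope the ZipInputStream with try-with-resources, since closing it also closes the underlying input stream. A minimal sketch under that assumption; it reads entries into plain strings instead of the project's JSSourceFile/LimitInputStream types.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

final class ExternsLoader {
    static Map<String, String> readExterns(InputStream raw) throws IOException {
        Map<String, String> externs = new LinkedHashMap<>();
        // Closing the ZipInputStream releases the wrapped stream as well.
        try (ZipInputStream zip = new ZipInputStream(raw)) {
            for (ZipEntry entry; (entry = zip.getNextEntry()) != null; ) {
                // readAllBytes() stops at the end of the current entry.
                externs.put(entry.getName(),
                        new String(zip.readAllBytes(), StandardCharsets.UTF_8));
            }
        }
        return externs;
    }
}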
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void outputManifest() throws IOException { List<String> outputManifests = config.outputManifests; if (outputManifests.isEmpty()) { return; } for (String outputManifest : outputManifests) { if (outputManifest.isEmpty()) { continue; } JSModuleGraph graph = compiler.getModuleGraph(); if (shouldGenerateManifestPerModule(outputManifest)) { // Generate per-module manifests. Iterable<JSModule> modules = graph.getAllModules(); for (JSModule module : modules) { Writer out = fileNameToOutputWriter( expandManifest(module, outputManifest)); printManifestTo(module.getInputs(), out); out.close(); } } else { // Generate a single file manifest. Writer out = fileNameToOutputWriter( expandManifest(null, outputManifest)); if (graph == null) { printManifestTo(compiler.getInputsInOrder(), out); } else { printModuleGraphManifestTo(graph, out); } out.close(); } } }
#vulnerable code private void outputManifest() throws IOException { String outputManifest = config.outputManifest; if (Strings.isEmpty(outputManifest)) { return; } JSModuleGraph graph = compiler.getModuleGraph(); if (shouldGenerateManifestPerModule()) { // Generate per-module manifests. Iterable<JSModule> modules = graph.getAllModules(); for (JSModule module : modules) { Writer out = fileNameToOutputWriter(expandManifest(module)); printManifestTo(module.getInputs(), out); out.close(); } } else { // Generate a single file manifest. Writer out = fileNameToOutputWriter(expandManifest(null)); if (graph == null) { printManifestTo(compiler.getInputsInOrder(), out); } else { printModuleGraphManifestTo(graph, out); } out.close(); } } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void process(Node externs, Node root) { CollectTweaksResult result = collectTweaks(root); applyCompilerDefaultValueOverrides(result.tweakInfos); boolean changed = false; if (stripTweaks) { changed = stripAllCalls(result.tweakInfos); } else if (!compilerDefaultValueOverrides.isEmpty()) { changed = replaceGetCompilerOverridesCalls(result.getOverridesCalls); } if (changed) { compiler.reportCodeChange(); } }
#vulnerable code @Override public void process(Node externs, Node root) { Map<String, TweakInfo> tweakInfos = collectTweaks(root); applyCompilerDefaultValueOverrides(tweakInfos); boolean changed = false; if (stripTweaks) { changed = stripAllCalls(tweakInfos); } else if (!compilerDefaultValueOverrides.isEmpty()) { // Pass the compiler default value overrides to the JS through a specially // named variable. Node varNode = createCompilerDefaultValueOverridesVarNode( root.getFirstChild()); root.getFirstChild().addChildToFront(varNode); changed = true; } if (changed) { compiler.reportCodeChange(); } } #location 15 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code FunctionTypeBuilder(String fnName, AbstractCompiler compiler, Node errorRoot, String sourceName, Scope scope) { Preconditions.checkNotNull(errorRoot); this.fnName = fnName == null ? "" : fnName; this.codingConvention = compiler.getCodingConvention(); this.typeRegistry = compiler.getTypeRegistry(); this.errorRoot = errorRoot; this.sourceName = sourceName; this.compiler = compiler; this.scope = scope; }
#vulnerable code FunctionTypeBuilder inferReturnType(@Nullable JSDocInfo info) { if (info != null && info.hasReturnType()) { returnType = info.getReturnType().evaluate(scope, typeRegistry); returnTypeInferred = false; } return this; } #location 3 #vulnerability type NULL_DEREFERENCE
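The patched code shown for this row is a constructor that validates errorRoot up front with Guava's Preconditions.checkNotNull, a fail-fast guard against later NullPointerExceptions. The same shape using only the JDK, with a placeholder field type; this is a sketch, not the builder's real signature.

import java.util.Objects;

final class FunctionTypeBuilderSketch {
    private final String fnName;
    private final Object errorRoot; // placeholder for the real Node type

    FunctionTypeBuilderSketch(String fnName, Object errorRoot) {
        // Fail fast: a null errorRoot is reported at the call site that passed it.
        this.errorRoot = Objects.requireNonNull(errorRoot, "errorRoot");
        this.fnName = fnName == null ? "" : fnName;
    }
}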
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void analyzeFunctionBwd( List<DiGraphNode<Node, ControlFlowGraph.Branch>> workset) { for (DiGraphNode<Node, ControlFlowGraph.Branch> dn : workset) { Node n = dn.getValue(); if (n.isThrow()) { // Throw statements have no out edges. // TODO(blickly): Support analyzing the body of the THROW continue; } TypeEnv outEnv = getOutEnv(n); TypeEnv inEnv; System.out.println("\tBWD Statment: " + n); System.out.println("\t\toutEnv: " + outEnv); switch (n.getType()) { case Token.EXPR_RESULT: inEnv = analyzeExprBwd(n.getFirstChild(), outEnv, JSType.TOP).env; break; case Token.RETURN: { Node retExp = n.getFirstChild(); if (retExp == null) { inEnv = outEnv; } else { JSType declRetType = currentScope.getDeclaredType().getReturnType(); declRetType = declRetType == null ? JSType.UNKNOWN : declRetType; inEnv = analyzeExprBwd(retExp, outEnv, declRetType).env; } break; } case Token.VAR: { inEnv = null; for (Node nameNode = n.getFirstChild(); nameNode != null; nameNode = nameNode.getNext()) { String varName = nameNode.getQualifiedName(); Node rhs = nameNode.getFirstChild(); JSType declType = currentScope.getDeclaredTypeOf(varName); inEnv = envPutType(outEnv, varName, JSType.UNKNOWN); if (rhs == null || currentScope.isLocalFunDef(varName)) { continue; } JSType requiredType = (declType == null) ? JSType.UNKNOWN : declType; inEnv = analyzeExprBwd(rhs, inEnv, JSType.meet(requiredType, envGetType(outEnv, varName))).env; } break; } case Token.BLOCK: case Token.EMPTY: inEnv = outEnv; break; case Token.FOR: // TODO(blickly): Analyze these statements case Token.WHILE: case Token.DO: case Token.IF: inEnv = outEnv; break; default: if (NodeUtil.isStatement(n)) { throw new RuntimeException("Unhandled statement type: " + Token.name(n.getType())); } else { inEnv = analyzeExprBwd(n, outEnv).env; break; } } System.out.println("\t\tinEnv: " + inEnv); setInEnv(n, inEnv); } }
#vulnerable code private void analyzeFunctionBwd( List<DiGraphNode<Node, ControlFlowGraph.Branch>> workset) { for (DiGraphNode<Node, ControlFlowGraph.Branch> dn : workset) { Node n = dn.getValue(); TypeEnv outEnv = getOutEnv(n); TypeEnv inEnv; System.out.println("\tBWD Statment: " + n); System.out.println("\t\toutEnv: " + outEnv); switch (n.getType()) { case Token.EXPR_RESULT: inEnv = analyzeExprBwd(n.getFirstChild(), outEnv, JSType.TOP).env; break; case Token.RETURN: { Node retExp = n.getFirstChild(); if (retExp == null) { inEnv = outEnv; } else { JSType declRetType = currentScope.getDeclaredType().getReturnType(); declRetType = declRetType == null ? JSType.UNKNOWN : declRetType; inEnv = analyzeExprBwd(retExp, outEnv, declRetType).env; } break; } case Token.VAR: { inEnv = null; for (Node nameNode = n.getFirstChild(); nameNode != null; nameNode = nameNode.getNext()) { String varName = nameNode.getQualifiedName(); Node rhs = nameNode.getFirstChild(); JSType declType = currentScope.getDeclaredTypeOf(varName); inEnv = envPutType(outEnv, varName, JSType.UNKNOWN); if (rhs == null || currentScope.isLocalFunDef(varName)) { continue; } JSType requiredType = (declType == null) ? JSType.UNKNOWN : declType; inEnv = analyzeExprBwd(rhs, inEnv, JSType.meet(requiredType, envGetType(outEnv, varName))).env; } break; } default: inEnv = outEnv; break; } System.out.println("\t\tinEnv: " + inEnv); setInEnv(n, inEnv); } } #location 38 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void outputManifest() throws IOException { List<String> outputManifests = config.outputManifests; if (outputManifests.isEmpty()) { return; } for (String outputManifest : outputManifests) { if (outputManifest.isEmpty()) { continue; } JSModuleGraph graph = compiler.getModuleGraph(); if (shouldGenerateManifestPerModule(outputManifest)) { // Generate per-module manifests. Iterable<JSModule> modules = graph.getAllModules(); for (JSModule module : modules) { Writer out = fileNameToOutputWriter( expandManifest(module, outputManifest)); printManifestTo(module.getInputs(), out); out.close(); } } else { // Generate a single file manifest. Writer out = fileNameToOutputWriter( expandManifest(null, outputManifest)); if (graph == null) { printManifestTo(compiler.getInputsInOrder(), out); } else { printModuleGraphManifestTo(graph, out); } out.close(); } } }
#vulnerable code private void outputManifest() throws IOException { String outputManifest = config.outputManifest; if (Strings.isEmpty(outputManifest)) { return; } JSModuleGraph graph = compiler.getModuleGraph(); if (shouldGenerateManifestPerModule()) { // Generate per-module manifests. Iterable<JSModule> modules = graph.getAllModules(); for (JSModule module : modules) { Writer out = fileNameToOutputWriter(expandManifest(module)); printManifestTo(module.getInputs(), out); out.close(); } } else { // Generate a single file manifest. Writer out = fileNameToOutputWriter(expandManifest(null)); if (graph == null) { printManifestTo(compiler.getInputsInOrder(), out); } else { printModuleGraphManifestTo(graph, out); } out.close(); } } #location 20 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<JSSourceFile> getDefaultExterns() { try { return CommandLineRunner.getDefaultExterns(); } catch (IOException e) { throw new BuildException(e); } }
#vulnerable code private List<JSSourceFile> getDefaultExterns() { try { InputStream input = Compiler.class.getResourceAsStream( "/externs.zip"); ZipInputStream zip = new ZipInputStream(input); List<JSSourceFile> externs = Lists.newLinkedList(); for (ZipEntry entry; (entry = zip.getNextEntry()) != null; ) { LimitInputStream entryStream = new LimitInputStream(zip, entry.getSize()); externs.add( JSSourceFile.fromInputStream(entry.getName(), entryStream)); } return externs; } catch (IOException e) { throw new BuildException(e); } } #location 12 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public void setSqlSource(MappedStatement ms) { MapperTemplate mapperTemplate = getMapperTemplate(ms.getId()); try { if (mapperTemplate != null) { mapperTemplate.setSqlSource(ms); } } catch (Exception e) { throw new RuntimeException("调用方法异常:" + e.getMessage()); } }
#vulnerable code public void setSqlSource(MappedStatement ms) { MapperTemplate mapperTemplate = getMapperTemplate(ms.getId()); try { mapperTemplate.setSqlSource(ms); } catch (Exception e) { throw new RuntimeException("调用方法异常:" + e.getMessage()); } } #location 4 #vulnerability type NULL_DEREFERENCE
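Here the lookup by statement id can legitimately miss, so the patch treats a null result as a no-op rather than an error. A tiny sketch of that shape with a generic registry; it does not use the real MapperTemplate/MappedStatement API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class TemplateRegistry {
    private final Map<String, Runnable> templates = new ConcurrentHashMap<>();

    void apply(String statementId) {
        Runnable template = templates.get(statementId); // null for unknown ids
        if (template != null) { // a miss is a normal case here, so just skip
            template.run();
        }
    }
}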
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code protected String read(InputStream inputStream) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, encoding)); StringBuffer stringBuffer = new StringBuffer(); String line = reader.readLine(); while (line != null) { stringBuffer.append(line).append("\n"); line = reader.readLine(); } return stringBuffer.toString(); }
#vulnerable code protected String read(InputStream inputStream) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); StringBuffer stringBuffer = new StringBuffer(); String line = reader.readLine(); while (line != null) { stringBuffer.append(line).append("\n"); line = reader.readLine(); } return stringBuffer.toString(); } #location 9 #vulnerability type RESOURCE_LEAK
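Two issues meet in this row: the reader is built without an explicit charset (the patch adds the configured encoding) and it is never closed. A minimal sketch that addresses both, assuming UTF-8 where the project would pass its own encoding setting:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

final class StreamText {
    static String read(InputStream inputStream) throws IOException {
        StringBuilder sb = new StringBuilder();
        // Explicit charset avoids platform-dependent decoding; try-with-resources closes the reader.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                sb.append(line).append('\n');
            }
        }
        return sb.toString();
    }
}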
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static void main(String[] args) { example(); }
#vulnerable code public static void main(String[] args) { @SuppressWarnings("resource") ApplicationContext context = new ClassPathXmlApplicationContext("exampleContext.xml"); SpringCacheExample example = context.getBean(SpringCacheExample.class); example.getBook(0); example.getBook(0); } #location 4 #vulnerability type RESOURCE_LEAK
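The leak flagged here and in the next row is an ApplicationContext that is created but never closed. ClassPathXmlApplicationContext is a ConfigurableApplicationContext, which is Closeable in Spring 4 and later, so a try-with-resources sketch along these lines is the usual remedy; the SpringCacheExample bean and exampleContext.xml come from the row, while the surrounding class is illustrative.

import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

public class SpringCacheExampleMain {
    public static void main(String[] args) {
        // The context (and its singleton beans) is shut down when the block exits.
        try (ConfigurableApplicationContext context =
                     new ClassPathXmlApplicationContext("exampleContext.xml")) {
            SpringCacheExample example = context.getBean(SpringCacheExample.class);
            example.getBook(0);
            example.getBook(0);
        }
    }
}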
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static void main(String[] args) { example(); }
#vulnerable code public static void main(String[] args) { @SuppressWarnings({ "resource", "unused" }) ApplicationContext context = new ClassPathXmlApplicationContext("exampleContext.xml"); } #location 3 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static void main(String[] args) { example(); }
#vulnerable code public static void main(String[] args) { Cache<Integer, Integer> cache = CacheBuilder.transactionalHeapCache() .transactionCommitter(new TransactionCommitter<Integer, Integer>() { int counter = 0; public void doPut(Integer key, Integer value) { if (counter < 3) { System.out.println("key[" + key + "]," + "value[" + value + "]"); counter++; } else { throw new RuntimeException(); } } }).build(); Transaction transaction1 = CacheTransaction.get(); transaction1.begin(); try { cache.put(3, 5); cache.put(10, 14); transaction1.commit(); } catch (TransactionException exception) { transaction1.rollback(); } finally { transaction1.close(); } System.out.println("Value for the key 3 is " + cache.get(3)); System.out.println("Value for the key 10 is " + cache.get(10)); Transaction transaction2 = CacheTransaction.get(); transaction2.begin(); try { cache.put(1, 10); cache.put(10, 13); transaction2.commit(); } catch (TransactionException exception) { transaction2.rollback(); } finally { transaction2.close(); } System.out.println("Value for the key 1 is " + cache.get(1)); System.out.println("Value for the key 10 is " + cache.get(10)); } #location 29 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Mapping getMapping(MappingPrism mapping) { return new Mapping( mapping.source(), mapping.target() ); }
#vulnerable code private Mapping getMapping(MappingPrism mapping) { Type converterType = typeUtil.retrieveType( mapping.converter() ); return new Mapping( mapping.source(), mapping.target(), converterType.getName().equals( "NoOpConverter" ) ? null : converterType ); } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<MappedProperty> getMappedProperties(ExecutableElement method, Map<String, Mapping> mappings) { TypeElement returnTypeElement = (TypeElement) typeUtils.asElement( method.getReturnType() ); TypeElement parameterElement = (TypeElement) typeUtils.asElement( method.getParameters().get( 0 ).asType() ); List<MappedProperty> properties = new ArrayList<MappedProperty>(); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); List<ExecutableElement> sourceSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); reportErrorIfMappedPropertiesDontExist( method, mappings, sourceGetters, targetSetters ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = Executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = Executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { properties.add( new MappedProperty( sourcePropertyName, getterMethod.getSimpleName().toString(), Executables.getCorrespondingPropertyAccessor( getterMethod, sourceSetters ) .getSimpleName() .toString(), retrieveReturnType( getterMethod ), mapping != null ? mapping.getTargetName() : targetPropertyName, Executables.getCorrespondingPropertyAccessor( setterMethod, targetGetters ) .getSimpleName() .toString(), setterMethod.getSimpleName().toString(), retrieveParameter( setterMethod ).getType() ) ); } } } return properties; }
#vulnerable code private List<MappedProperty> getMappedProperties(ExecutableElement method, Map<String, Mapping> mappings) { Element returnTypeElement = typeUtils.asElement( method.getReturnType() ); Element parameterElement = typeUtils.asElement( method.getParameters().get( 0 ).asType() ); List<MappedProperty> properties = new ArrayList<MappedProperty>(); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( parameterElement.getEnclosedElements() ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( returnTypeElement.getEnclosedElements() ); reportErrorIfMappedPropertiesDontExist( method, mappings, sourceGetters, targetSetters ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = Executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = Executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { properties.add( new MappedProperty( sourcePropertyName, getterMethod.getSimpleName().toString(), Executables.getCorrespondingSetterMethod( parameterElement, getterMethod ) .getSimpleName() .toString(), retrieveReturnType( getterMethod ), mapping != null ? mapping.getTargetName() : targetPropertyName, Executables.getCorrespondingGetterMethod( returnTypeElement, setterMethod ) .getSimpleName() .toString(), setterMethod.getSimpleName().toString(), retrieveParameter( setterMethod ).getType() ) ); } } } return properties; } #location 24 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public boolean process( final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnvironment) { for ( TypeElement oneAnnotation : annotations ) { //Indicates that the annotation's type isn't on the class path of the compiled //project. Let the compiler deal with that and print an appropriate error. if ( oneAnnotation.getKind() != ElementKind.ANNOTATION_TYPE ) { continue; } for ( Element oneAnnotatedElement : roundEnvironment.getElementsAnnotatedWith( oneAnnotation ) ) { oneAnnotatedElement.accept( new MapperGenerationVisitor( processingEnv ), null ); } } return ANNOTATIONS_CLAIMED_EXCLUSIVELY; }
#vulnerable code @Override public boolean process( final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnvironment) { for ( TypeElement oneAnnotation : annotations ) { //Indicates that the annotation's type isn't on the class path of the compiled //project. Let the compiler deal with that and print an appropriate error. if ( oneAnnotation.getKind() != ElementKind.ANNOTATION_TYPE ) { continue; } for ( Element oneAnnotatedElement : roundEnvironment.getElementsAnnotatedWith( oneAnnotation ) ) { oneAnnotatedElement.accept( new MapperGenerationVisitor( processingEnv, configuration ), null ); } } return ANNOTATIONS_CLAIMED_EXCLUSIVELY; } #location 15 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<Method> retrieveMethods(TypeElement element, boolean implementationRequired) { List<Method> methods = new ArrayList<Method>(); MapperPrism mapperPrism = implementationRequired ? MapperPrism.getInstanceOn( element ) : null; for ( ExecutableElement executable : methodsIn( element.getEnclosedElements() ) ) { Method method = getMethod( element, executable, implementationRequired ); if ( method != null ) { methods.add( method ); } } //Add all methods of used mappers in order to reference them in the aggregated model if ( implementationRequired ) { for ( TypeMirror usedMapper : mapperPrism.uses() ) { methods.addAll( retrieveMethods( (TypeElement) ( (DeclaredType) usedMapper ).asElement(), false ) ); } } return methods; }
#vulnerable code private List<Method> retrieveMethods(TypeElement element, boolean implementationRequired) { List<Method> methods = new ArrayList<Method>(); MapperPrism mapperPrism = implementationRequired ? MapperPrism.getInstanceOn( element ) : null; //TODO Extract to separate method for ( ExecutableElement method : methodsIn( element.getEnclosedElements() ) ) { Parameter parameter = executables.retrieveParameter( method ); Type returnType = executables.retrieveReturnType( method ); boolean mappingErroneous = false; if ( implementationRequired ) { if ( parameter.getType().isIterableType() && !returnType.isIterableType() ) { printMessage( ReportingPolicy.ERROR, "Can't generate mapping method from iterable type to non-iterable type.", method ); mappingErroneous = true; } if ( !parameter.getType().isIterableType() && returnType.isIterableType() ) { printMessage( ReportingPolicy.ERROR, "Can't generate mapping method from non-iterable type to iterable type.", method ); mappingErroneous = true; } if ( parameter.getType().isPrimitive() ) { printMessage( ReportingPolicy.ERROR, "Can't generate mapping method with primitive parameter type.", method ); mappingErroneous = true; } if ( returnType.isPrimitive() ) { printMessage( ReportingPolicy.ERROR, "Can't generate mapping method with primitive return type.", method ); mappingErroneous = true; } if ( mappingErroneous ) { continue; } } //add method with property mappings if an implementation needs to be generated if ( implementationRequired ) { methods.add( Method.forMethodRequiringImplementation( method, parameter.getName(), parameter.getType(), returnType, getMappings( method ) ) ); } //otherwise add reference to existing mapper method else { methods.add( Method.forReferencedMethod( typeUtil.getType( typeUtils.getDeclaredType( element ) ), method, parameter.getName(), parameter.getType(), returnType ) ); } } //Add all methods of used mappers in order to reference them in the aggregated model if ( implementationRequired ) { for ( TypeMirror usedMapper : mapperPrism.uses() ) { methods.addAll( retrieveMethods( (TypeElement) ( (DeclaredType) usedMapper ).asElement(), false ) ); } } return methods; } #location 57 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void deleteDirectory(File path) { if ( path.exists() ) { File[] files = path.listFiles(); for ( File file : files ) { if ( file.isDirectory() ) { deleteDirectory( file ); } else { file.delete(); } } } path.delete(); }
#vulnerable code private void deleteDirectory(File path) { if ( path.exists() ) { File[] files = path.listFiles(); for ( int i = 0; i < files.length; i++ ) { if ( files[i].isDirectory() ) { deleteDirectory( files[i] ); } else { files[i].delete(); } } } path.delete(); } #location 4 #vulnerability type NULL_DEREFERENCE
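The deleteDirectory entry is the well-known File.listFiles() pitfall: the JDK documents that listFiles() returns null when the path is not a directory or when an I/O error occurs, so exists() alone does not make the dereference safe. A small defensive sketch (not the project's actual fix, which only switches to an enhanced for loop) could add the explicit guard:

import java.io.File;

// Defensive variant of the recursive delete: the listFiles() result is null-checked before iteration.
final class Directories {
    static void deleteRecursively(File path) {
        File[] children = path.listFiles();
        if ( children != null ) {
            for ( File child : children ) {
                if ( child.isDirectory() ) {
                    deleteRecursively( child );
                }
                else {
                    child.delete();
                }
            }
        }
        path.delete();
    }
}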
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<Method> retrieveMethods(TypeElement element, boolean mapperRequiresImplementation) { List<Method> methods = new ArrayList<Method>(); for ( ExecutableElement executable : methodsIn( element.getEnclosedElements() ) ) { Method method = getMethod( element, executable, mapperRequiresImplementation ); if ( method != null ) { methods.add( method ); } } //Add all methods of used mappers in order to reference them in the aggregated model if ( mapperRequiresImplementation ) { MapperPrism mapperPrism = MapperPrism.getInstanceOn( element ); if ( !mapperPrism.isValid ) { throw new AnnotationProcessingException( "Couldn't retrieve @Mapper annotation", element, mapperPrism.mirror ); } for ( TypeMirror usedMapper : mapperPrism.uses() ) { methods.addAll( retrieveMethods( (TypeElement) ( (DeclaredType) usedMapper ).asElement(), false ) ); } } return methods; }
#vulnerable code private List<Method> retrieveMethods(TypeElement element, boolean mapperRequiresImplementation) { List<Method> methods = new ArrayList<Method>(); MapperPrism mapperPrism = mapperRequiresImplementation ? MapperPrism.getInstanceOn( element ) : null; for ( ExecutableElement executable : methodsIn( element.getEnclosedElements() ) ) { Method method = getMethod( element, executable, mapperRequiresImplementation ); if ( method != null ) { methods.add( method ); } } //Add all methods of used mappers in order to reference them in the aggregated model if ( mapperRequiresImplementation ) { for ( TypeMirror usedMapper : mapperPrism.uses() ) { methods.addAll( retrieveMethods( (TypeElement) ( (DeclaredType) usedMapper ).asElement(), false ) ); } } return methods; } #location 15 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private MappingMethod getBeanMappingMethod(List<Method> methods, Method method, ReportingPolicy unmappedTargetPolicy) { List<PropertyMapping> propertyMappings = new ArrayList<PropertyMapping>(); Set<String> mappedTargetProperties = new HashSet<String>(); Map<String, Mapping> mappings = method.getMappings(); TypeElement resultTypeElement = elementUtils.getTypeElement( method.getResultType().getCanonicalName() ); TypeElement parameterElement = elementUtils.getTypeElement( method.getSingleSourceParameter() .getType() .getCanonicalName() ); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( resultTypeElement ) ); Set<String> sourceProperties = executables.getPropertyNames( Filters.getterMethodsIn( sourceGetters ) ); Set<String> targetProperties = executables.getPropertyNames( Filters.setterMethodsIn( targetSetters ) ); reportErrorIfMappedPropertiesDontExist( method, sourceProperties, targetProperties ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); String dateFormat = mapping != null ? mapping.getDateFormat() : null; for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { PropertyMapping property = getPropertyMapping( methods, method, getterMethod, setterMethod, dateFormat ); propertyMappings.add( property ); mappedTargetProperties.add( targetPropertyName ); } } } reportErrorForUnmappedTargetPropertiesIfRequired( method, unmappedTargetPolicy, targetProperties, mappedTargetProperties ); return new BeanMappingMethod( method, propertyMappings ); }
#vulnerable code private MappingMethod getBeanMappingMethod(List<Method> methods, Method method, ReportingPolicy unmappedTargetPolicy) { List<PropertyMapping> propertyMappings = new ArrayList<PropertyMapping>(); Set<String> mappedTargetProperties = new HashSet<String>(); Map<String, Mapping> mappings = method.getMappings(); TypeElement resultTypeElement = elementUtils.getTypeElement( method.getResultType().getCanonicalName() ); TypeElement parameterElement = elementUtils.getTypeElement( method.getSingleSourceType().getCanonicalName() ); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( resultTypeElement ) ); Set<String> sourceProperties = executables.getPropertyNames( Filters.getterMethodsIn( sourceGetters ) ); Set<String> targetProperties = executables.getPropertyNames( Filters.setterMethodsIn( targetSetters ) ); reportErrorIfMappedPropertiesDontExist( method, sourceProperties, targetProperties ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); String dateFormat = mapping != null ? mapping.getDateFormat() : null; for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { PropertyMapping property = getPropertyMapping( methods, method, getterMethod, setterMethod, dateFormat ); propertyMappings.add( property ); mappedTargetProperties.add( targetPropertyName ); } } } reportErrorForUnmappedTargetPropertiesIfRequired( method, unmappedTargetPolicy, targetProperties, mappedTargetProperties ); return new BeanMappingMethod( method.getName(), method.getParameters(), method.getSourceParameters(), method.getResultType(), method.getResultName(), method.getReturnType(), propertyMappings ); } #location 9 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<MappedProperty> getMappedProperties(ExecutableElement method, Map<String, Mapping> mappings) { TypeElement returnTypeElement = (TypeElement) typeUtils.asElement( method.getReturnType() ); TypeElement parameterElement = (TypeElement) typeUtils.asElement( method.getParameters().get( 0 ).asType() ); List<MappedProperty> properties = new ArrayList<MappedProperty>(); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); List<ExecutableElement> sourceSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); reportErrorIfMappedPropertiesDontExist( method, mappings, sourceGetters, targetSetters ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = Executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = Executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { ExecutableElement correspondingSetter = Executables.getCorrespondingPropertyAccessor( getterMethod, sourceSetters ); ExecutableElement correspondingGetter = Executables.getCorrespondingPropertyAccessor( setterMethod, targetGetters ); properties.add( new MappedProperty( sourcePropertyName, getterMethod.getSimpleName().toString(), correspondingSetter != null ? correspondingSetter.getSimpleName().toString() : null, retrieveReturnType( getterMethod ), mapping != null ? mapping.getTargetName() : targetPropertyName, correspondingGetter != null ? correspondingGetter.getSimpleName().toString() : null, setterMethod.getSimpleName().toString(), retrieveParameter( setterMethod ).getType() ) ); } } } return properties; }
#vulnerable code private List<MappedProperty> getMappedProperties(ExecutableElement method, Map<String, Mapping> mappings) { TypeElement returnTypeElement = (TypeElement) typeUtils.asElement( method.getReturnType() ); TypeElement parameterElement = (TypeElement) typeUtils.asElement( method.getParameters().get( 0 ).asType() ); List<MappedProperty> properties = new ArrayList<MappedProperty>(); List<ExecutableElement> sourceGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); List<ExecutableElement> sourceSetters = Filters.setterMethodsIn( elementUtils.getAllMembers( parameterElement ) ); List<ExecutableElement> targetGetters = Filters.getterMethodsIn( elementUtils.getAllMembers( returnTypeElement ) ); reportErrorIfMappedPropertiesDontExist( method, mappings, sourceGetters, targetSetters ); for ( ExecutableElement getterMethod : sourceGetters ) { String sourcePropertyName = Executables.getPropertyName( getterMethod ); Mapping mapping = mappings.get( sourcePropertyName ); for ( ExecutableElement setterMethod : targetSetters ) { String targetPropertyName = Executables.getPropertyName( setterMethod ); if ( targetPropertyName.equals( mapping != null ? mapping.getTargetName() : sourcePropertyName ) ) { properties.add( new MappedProperty( sourcePropertyName, getterMethod.getSimpleName().toString(), Executables.getCorrespondingPropertyAccessor( getterMethod, sourceSetters ) .getSimpleName() .toString(), retrieveReturnType( getterMethod ), mapping != null ? mapping.getTargetName() : targetPropertyName, Executables.getCorrespondingPropertyAccessor( setterMethod, targetGetters ) .getSimpleName() .toString(), setterMethod.getSimpleName().toString(), retrieveParameter( setterMethod ).getType() ) ); } } } return properties; } #location 35 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private PropertyMapping getConstantMapping(List<MapperReference> mapperReferences, List<SourceMethod> methods, SourceMethod method, String constantExpression, ExecutableElement targetAccessor, String dateFormat, List<TypeMirror> qualifiers) { // source String mappedElement = "constant '" + constantExpression + "'"; Type sourceType = typeFactory.getType( String.class ); // target Type targetType; if ( Executables.isSetterMethod( targetAccessor ) ) { targetType = typeFactory.getSingleParameter( targetAccessor ).getType(); } else { targetType = typeFactory.getReturnType( targetAccessor ); } String targetPropertyName = Executables.getPropertyName( targetAccessor ); Assignment assignment = mappingResolver.getTargetAssignment( method, mappedElement, mapperReferences, methods, sourceType, targetType, targetPropertyName, dateFormat, qualifiers, constantExpression ); if ( assignment != null ) { // target accessor is setter, so decorate assignment as setter assignment = new SetterWrapper( assignment, method.getThrownTypes() ); // wrap when dealing with getter only on target if ( Executables.isGetterMethod( targetAccessor ) ) { assignment = new GetterCollectionOrMapWrapper( assignment ); } } else { messager.printMessage( Kind.ERROR, String.format( "Can't map \"%s %s\" to \"%s %s\".", sourceType, constantExpression, targetType, targetPropertyName ), method.getExecutable() ); } return new PropertyMapping( targetAccessor.getSimpleName().toString(), targetType, assignment ); }
#vulnerable code private PropertyMapping getConstantMapping(List<MapperReference> mapperReferences, List<SourceMethod> methods, SourceMethod method, String constantExpression, ExecutableElement targetAccessor, String dateFormat, List<TypeMirror> qualifiers) { // source String mappedElement = "constant '" + constantExpression + "'"; Type sourceType = typeFactory.getType( String.class ); // target Type targetType = typeFactory.getSingleParameter( targetAccessor ).getType(); String targetPropertyName = Executables.getPropertyName( targetAccessor ); Assignment assignment = mappingResolver.getTargetAssignment( method, mappedElement, mapperReferences, methods, sourceType, targetType, targetPropertyName, dateFormat, qualifiers, constantExpression ); if ( assignment != null ) { // target accessor is setter, so decorate assignment as setter assignment = new SetterWrapper( assignment, method.getThrownTypes() ); } else { messager.printMessage( Kind.ERROR, String.format( "Can't map \"%s %s\" to \"%s %s\".", sourceType, constantExpression, targetType, targetPropertyName ), method.getExecutable() ); } return new PropertyMapping( targetAccessor.getSimpleName().toString(), targetType, assignment ); } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private PropertyMapping getPropertyMapping(List<MapperReference> mapperReferences, List<SourceMethod> methods, SourceMethod method, Parameter parameter, ExecutableElement sourceAccessor, ExecutableElement targetAcessor, String dateFormat) { Type sourceType = typeFactory.getReturnType( sourceAccessor ); Type targetType = null; String conversionString = parameter.getName() + "." + sourceAccessor.getSimpleName().toString() + "()"; if ( Executables.isSetterMethod( targetAcessor ) ) { targetType = typeFactory.getSingleParameter( targetAcessor ).getType(); } else if ( Executables.isGetterMethod( targetAcessor ) ) { targetType = typeFactory.getReturnType( targetAcessor ); } String targetPropertyName = Executables.getPropertyName( targetAcessor ); String mappedElement = "property '" + Executables.getPropertyName( sourceAccessor ) + "'"; ParameterAssignment parameterAssignment = mappingResolver.getParameterAssignment( method, mappedElement, mapperReferences, methods, sourceType, targetType, targetPropertyName, dateFormat, conversionString ); PropertyMapping property = new PropertyMapping( parameter.getName(), Executables.getPropertyName( sourceAccessor ), sourceAccessor.getSimpleName().toString(), sourceType, Executables.getPropertyName( targetAcessor ), targetAcessor.getSimpleName().toString(), targetType, parameterAssignment != null ? parameterAssignment.getMethodReference() : null, parameterAssignment != null ? parameterAssignment.getTypeConversion() : null ); if ( !isPropertyMappable( property ) ) { messager.printMessage( Kind.ERROR, String.format( "Can't map property \"%s %s\" to \"%s %s\".", property.getSourceType(), property.getSourceName(), property.getTargetType(), property.getTargetName() ), method.getExecutable() ); } return property; }
#vulnerable code private PropertyMapping getPropertyMapping(List<MapperReference> mapperReferences, List<SourceMethod> methods, SourceMethod method, Parameter parameter, ExecutableElement sourceAccessor, ExecutableElement targetAcessor, String dateFormat) { Type sourceType = typeFactory.getReturnType( sourceAccessor ); Type targetType = null; String conversionString = parameter.getName() + "." + sourceAccessor.getSimpleName().toString() + "()"; if ( Executables.isSetterMethod( targetAcessor ) ) { targetType = typeFactory.getSingleParameter( targetAcessor ).getType(); } else if ( Executables.isGetterMethod( targetAcessor ) ) { targetType = typeFactory.getReturnType( targetAcessor ); } String targetPropertyName = Executables.getPropertyName( targetAcessor ); String mappedElement = "property '" + Executables.getPropertyName( sourceAccessor ) + "'"; MethodReference mappingMethodReference = mappingMethodResolver.getMappingMethodReferenceBasedOnMethod( method, mappedElement, mapperReferences, methods, sourceType, targetType, targetPropertyName, dateFormat ); TypeConversion conversion = mappingMethodResolver.getConversion( sourceType, targetType, dateFormat, conversionString ); PropertyMapping property = new PropertyMapping( parameter.getName(), Executables.getPropertyName( sourceAccessor ), sourceAccessor.getSimpleName().toString(), sourceType, Executables.getPropertyName( targetAcessor ), targetAcessor.getSimpleName().toString(), targetType, mappingMethodReference, conversion ); if ( !isPropertyMappable( property ) ) { // when not mappable, try again with another property mapping method based on parameter only. mappingMethodReference = mappingMethodResolver.getMappingMethodReferenceBasedOnParameter( method, "property '" + Executables.getPropertyName( sourceAccessor ) + "'", mapperReferences, methods, sourceType, targetType, targetPropertyName, dateFormat ); property = new PropertyMapping( parameter.getName(), Executables.getPropertyName( sourceAccessor ), sourceAccessor.getSimpleName().toString(), sourceType, Executables.getPropertyName( targetAcessor ), targetAcessor.getSimpleName().toString(), targetType, mappingMethodReference, conversion ); } if ( !isPropertyMappable( property ) ) { messager.printMessage( Kind.ERROR, String.format( "Can't map property \"%s %s\" to \"%s %s\".", property.getSourceType(), property.getSourceName(), property.getTargetType(), property.getTargetName() ), method.getExecutable() ); } return property; } #location 32 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Type getType(TypeMirror mirror, boolean isLiteral) { if ( !canBeProcessed( mirror ) ) { throw new TypeHierarchyErroneousException( mirror ); } ImplementationType implementationType = getImplementationType( mirror ); boolean isIterableType = typeUtils.isSubtype( mirror, iterableType ); boolean isCollectionType = typeUtils.isSubtype( mirror, collectionType ); boolean isMapType = typeUtils.isSubtype( mirror, mapType ); boolean isStreamType = streamType != null && typeUtils.isSubtype( mirror, streamType ); boolean isEnumType; boolean isInterface; String name; String packageName; String qualifiedName; TypeElement typeElement; Type componentType; Boolean toBeImported = null; if ( mirror.getKind() == TypeKind.DECLARED ) { DeclaredType declaredType = (DeclaredType) mirror; isEnumType = declaredType.asElement().getKind() == ElementKind.ENUM; isInterface = declaredType.asElement().getKind() == ElementKind.INTERFACE; name = declaredType.asElement().getSimpleName().toString(); typeElement = (TypeElement) declaredType.asElement(); if ( typeElement != null ) { packageName = elementUtils.getPackageOf( typeElement ).getQualifiedName().toString(); qualifiedName = typeElement.getQualifiedName().toString(); } else { packageName = null; qualifiedName = name; } componentType = null; } else if ( mirror.getKind() == TypeKind.ARRAY ) { TypeMirror componentTypeMirror = getComponentType( mirror ); StringBuilder builder = new StringBuilder("[]"); while ( componentTypeMirror.getKind() == TypeKind.ARRAY ) { componentTypeMirror = getComponentType( componentTypeMirror ); builder.append( "[]" ); } if ( componentTypeMirror.getKind() == TypeKind.DECLARED ) { DeclaredType declaredType = (DeclaredType) componentTypeMirror; TypeElement componentTypeElement = (TypeElement) declaredType.asElement(); String arraySuffix = builder.toString(); name = componentTypeElement.getSimpleName().toString() + arraySuffix; packageName = elementUtils.getPackageOf( componentTypeElement ).getQualifiedName().toString(); qualifiedName = componentTypeElement.getQualifiedName().toString() + arraySuffix; } else if (componentTypeMirror.getKind().isPrimitive()) { // When the component type is primitive and is annotated with ElementType.TYPE_USE then // the typeMirror#toString returns (@CustomAnnotation :: byte) for the javac compiler name = NativeTypes.getName( componentTypeMirror.getKind() ) + builder.toString(); packageName = null; // for primitive types only name (e.g. byte, short..) required as qualified name qualifiedName = name; toBeImported = false; } else { name = mirror.toString(); packageName = null; qualifiedName = name; toBeImported = false; } isEnumType = false; isInterface = false; typeElement = null; componentType = getType( getComponentType( mirror ) ); } else { isEnumType = false; isInterface = false; // When the component type is primitive and is annotated with ElementType.TYPE_USE then // the typeMirror#toString returns (@CustomAnnotation :: byte) for the javac compiler name = mirror.getKind().isPrimitive() ? NativeTypes.getName( mirror.getKind() ) : mirror.toString(); packageName = null; qualifiedName = name; typeElement = null; componentType = null; toBeImported = false; } return new Type( typeUtils, elementUtils, this, roundContext.getAnnotationProcessorContext().getAccessorNaming(), mirror, typeElement, getTypeParameters( mirror, false ), implementationType, componentType, packageName, name, qualifiedName, isInterface, isEnumType, isIterableType, isCollectionType, isMapType, isStreamType, toBeImportedTypes, notToBeImportedTypes, toBeImported, isLiteral, loggingVerbose ); }
#vulnerable code private Type getType(TypeMirror mirror, boolean isLiteral) { if ( !canBeProcessed( mirror ) ) { throw new TypeHierarchyErroneousException( mirror ); } ImplementationType implementationType = getImplementationType( mirror ); boolean isIterableType = typeUtils.isSubtype( mirror, iterableType ); boolean isCollectionType = typeUtils.isSubtype( mirror, collectionType ); boolean isMapType = typeUtils.isSubtype( mirror, mapType ); boolean isStreamType = streamType != null && typeUtils.isSubtype( mirror, streamType ); boolean isEnumType; boolean isInterface; String name; String packageName; String qualifiedName; TypeElement typeElement; Type componentType; Boolean toBeImported = null; if ( mirror.getKind() == TypeKind.DECLARED ) { DeclaredType declaredType = (DeclaredType) mirror; isEnumType = declaredType.asElement().getKind() == ElementKind.ENUM; isInterface = declaredType.asElement().getKind() == ElementKind.INTERFACE; name = declaredType.asElement().getSimpleName().toString(); typeElement = (TypeElement) declaredType.asElement(); if ( typeElement != null ) { packageName = elementUtils.getPackageOf( typeElement ).getQualifiedName().toString(); qualifiedName = typeElement.getQualifiedName().toString(); } else { packageName = null; qualifiedName = name; } componentType = null; } else if ( mirror.getKind() == TypeKind.ARRAY ) { TypeMirror componentTypeMirror = getComponentType( mirror ); StringBuilder builder = new StringBuilder("[]"); while ( componentTypeMirror.getKind() == TypeKind.ARRAY ) { componentTypeMirror = getComponentType( componentTypeMirror ); builder.append( "[]" ); } if ( componentTypeMirror.getKind() == TypeKind.DECLARED ) { DeclaredType declaredType = (DeclaredType) componentTypeMirror; TypeElement componentTypeElement = (TypeElement) declaredType.asElement(); String arraySuffix = builder.toString(); name = componentTypeElement.getSimpleName().toString() + arraySuffix; packageName = elementUtils.getPackageOf( componentTypeElement ).getQualifiedName().toString(); qualifiedName = componentTypeElement.getQualifiedName().toString() + arraySuffix; } else if (componentTypeMirror.getKind().isPrimitive()) { // When the component type is primitive and is annotated with ElementType.TYPE_USE then // the typeMirror#toString returns (@CustomAnnotation :: byte) for the javac compiler name = NativeTypes.getName( componentTypeMirror.getKind() ) + builder.toString(); packageName = null; // for primitive types only name (e.g. byte, short..) required as qualified name qualifiedName = name; toBeImported = false; } else { name = mirror.toString(); packageName = null; qualifiedName = name; toBeImported = false; } isEnumType = false; isInterface = false; typeElement = null; componentType = getType( getComponentType( mirror ) ); } else { isEnumType = false; isInterface = false; name = mirror.toString(); packageName = null; qualifiedName = name; typeElement = null; componentType = null; toBeImported = false; } return new Type( typeUtils, elementUtils, this, roundContext.getAnnotationProcessorContext().getAccessorNaming(), mirror, typeElement, getTypeParameters( mirror, false ), implementationType, componentType, packageName, name, qualifiedName, isInterface, isEnumType, isIterableType, isCollectionType, isMapType, isStreamType, toBeImportedTypes, notToBeImportedTypes, toBeImported, isLiteral, loggingVerbose ); } #location 46 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_ARRAY_BUFFER: V8ArrayBuffer buffer = (V8ArrayBuffer) array.get(index); try { return new ArrayBuffer(buffer.getBackingStore()); } finally { buffer.release(); } case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return toTypedArray(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } }
#vulnerable code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_ARRAY_BUFFER: V8ArrayBuffer buffer = (V8ArrayBuffer) array.get(index); try { return buffer.getBackingStore(); } finally { buffer.release(); } case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return toTypedArray(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } } #location 33 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static V8 createV8Runtime(final String globalAlias, final String tempDirectory) { if (!nativeLibraryLoaded) { synchronized (lock) { if (!nativeLibraryLoaded) { load(tempDirectory); } } } checkNativeLibraryLoaded(); if (!initialized) { _setFlags(v8Flags); initialized = true; } V8 runtime = new V8(globalAlias); synchronized (lock) { runtimeCounter++; } return runtime; }
#vulnerable code public static V8 createV8Runtime(final String globalAlias, final String tempDirectory) { if (!nativeLibraryLoaded) { synchronized (lock) { if (!nativeLibraryLoaded) { load(tempDirectory); } } } checkNativeLibraryLoaded(); if (!initialized) { _setFlags(v8Flags); initialized = true; } if (debugThread == null) { debugThread = Thread.currentThread(); } V8 runtime = new V8(globalAlias); synchronized (lock) { runtimeCounter++; } return runtime; } #location 15 #vulnerability type THREAD_SAFETY_VIOLATION
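The createV8Runtime entry removes an unsynchronized lazy write (debugThread = Thread.currentThread()) that raced with readers on other threads; the surrounding load check relies on double-checked locking. A minimal sketch of that idiom with hypothetical names follows; note that the checked flag must be volatile (or only read inside the lock) for the fast path to be safe:

final class LazyNativeLoader {
    private static final Object LOCK = new Object();
    private static volatile boolean loaded;

    static void ensureLoaded(String tempDirectory) {
        if ( !loaded ) {
            synchronized ( LOCK ) {
                if ( !loaded ) {
                    loadNativeLibrary( tempDirectory ); // stand-in for the real library-loading code
                    loaded = true;
                }
            }
        }
    }

    private static void loadNativeLibrary(String tempDirectory) {
        // hypothetical placeholder for extraction and System.load(..)
    }
}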
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_ARRAY_BUFFER: V8ArrayBuffer buffer = (V8ArrayBuffer) array.get(index); try { return buffer.getBackingStore(); } finally { buffer.release(); } case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return toTypedArray(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } }
#vulnerable code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_ARRAY_BUFFER: V8ArrayBuffer buffer = (V8ArrayBuffer) array.get(index); try { return buffer.getBackingStore(); } finally { buffer.release(); } case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return toByteBuffer(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } } #location 24 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return toByteBuffer(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } }
#vulnerable code private static Object getValue(final V8Array array, final int index, final V8Map<Object> cache) { int valueType = array.getType(index); switch (valueType) { case V8Value.INTEGER: return array.getInteger(index); case V8Value.DOUBLE: return array.getDouble(index); case V8Value.BOOLEAN: return array.getBoolean(index); case V8Value.STRING: return array.getString(index); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_TYPED_ARRAY: V8Array typedArray = array.getArray(index); try { return ((V8TypedArray) typedArray).getByteBuffer(); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array arrayValue = array.getArray(index); try { return toList(arrayValue, cache); } finally { if (arrayValue instanceof V8Array) { arrayValue.release(); } } case V8Value.V8_OBJECT: V8Object objectValue = array.getObject(index); try { return toMap(objectValue, cache); } finally { if (objectValue instanceof V8Object) { objectValue.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for index: " + index); } } #location 17 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testTypedArrayGetValue_Float32Array() { V8Array floatsArray = v8.executeArrayScript("var buf = new ArrayBuffer(100);\n" + "var floatsArray = new Float32Array(buf);\n" + "floatsArray[0] = 16.2;\n" + "floatsArray;\n"); V8TypedArray result = ((TypedArray) V8ObjectUtils.getValue(floatsArray)).getV8TypedArray(); assertEquals(25, result.length()); assertEquals(16.2, (Float) result.get(0), 0.00001); floatsArray.close(); result.close(); }
#vulnerable code @Test public void testTypedArrayGetValue_Float32Array() { V8Array floatsArray = v8.executeArrayScript("var buf = new ArrayBuffer(100);\n" + "var floatsArray = new Float32Array(buf);\n" + "floatsArray[0] = 16.2;\n" + "floatsArray;\n"); V8TypedArray result = (V8TypedArray) V8ObjectUtils.getValue(floatsArray); assertEquals(25, result.length()); assertEquals(16.2, (Float) result.get(0), 0.00001); floatsArray.close(); } #location 11 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public void release(final boolean reportMemoryLeaks) { if (isReleased()) { return; } checkThread(); releaseResources(); shutdownExecutors(forceTerminateExecutors); if (executors != null) { executors.clear(); } synchronized (lock) { runtimeCounter--; } _releaseRuntime(v8RuntimePtr); v8RuntimePtr = 0L; released = true; if (reportMemoryLeaks && (objectReferences > 0)) { throw new IllegalStateException(objectReferences + " Object(s) still exist in runtime"); } }
#vulnerable code public void release(final boolean reportMemoryLeaks) { if (isReleased()) { return; } checkThread(); if (debugEnabled) { disableDebugSupport(); } releaseResources(); shutdownExecutors(forceTerminateExecutors); if (executors != null) { executors.clear(); } synchronized (lock) { runtimeCounter--; } _releaseRuntime(v8RuntimePtr); v8RuntimePtr = 0L; released = true; if (reportMemoryLeaks && (objectReferences > 0)) { throw new IllegalStateException(objectReferences + " Object(s) still exist in runtime"); } } #location 7 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testTypedArrayGetValue_Float64Array() { V8Array floatsArray = v8.executeArrayScript("var buf = new ArrayBuffer(80);\n" + "var floatsArray = new Float64Array(buf);\n" + "floatsArray[0] = 16.2;\n" + "floatsArray;\n"); V8TypedArray result = ((TypedArray) V8ObjectUtils.getValue(floatsArray)).getV8TypedArray(); assertEquals(10, result.length()); assertEquals(16.2, (Double) result.get(0), 0.0001); floatsArray.close(); result.close(); }
#vulnerable code @Test public void testTypedArrayGetValue_Float64Array() { V8Array floatsArray = v8.executeArrayScript("var buf = new ArrayBuffer(80);\n" + "var floatsArray = new Float64Array(buf);\n" + "floatsArray[0] = 16.2;\n" + "floatsArray;\n"); V8TypedArray result = (V8TypedArray) V8ObjectUtils.getValue(floatsArray); assertEquals(10, result.length()); assertEquals(16.2, (Double) result.get(0), 0.0001); floatsArray.close(); } #location 11 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private static Object getValue(final V8Object object, final String key, final V8Map<Object> cache) { int valueType = object.getType(key); switch (valueType) { case V8Value.INTEGER: return object.getInteger(key); case V8Value.DOUBLE: return object.getDouble(key); case V8Value.BOOLEAN: return object.getBoolean(key); case V8Value.STRING: return object.getString(key); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_TYPED_ARRAY: V8Array typedArray = object.getArray(key); try { return toByteBuffer(typedArray); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array array = object.getArray(key); try { return toList(array, cache); } finally { if (array instanceof V8Array) { array.release(); } } case V8Value.V8_OBJECT: V8Object child = object.getObject(key); try { return toMap(child, cache); } finally { if (child instanceof V8Object) { child.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for key: " + key); } }
#vulnerable code private static Object getValue(final V8Object object, final String key, final V8Map<Object> cache) { int valueType = object.getType(key); switch (valueType) { case V8Value.INTEGER: return object.getInteger(key); case V8Value.DOUBLE: return object.getDouble(key); case V8Value.BOOLEAN: return object.getBoolean(key); case V8Value.STRING: return object.getString(key); case V8Value.V8_FUNCTION: return IGNORE; case V8Value.V8_TYPED_ARRAY: V8Array typedArray = object.getArray(key); try { return ((V8TypedArray) typedArray).getByteBuffer(); } finally { if (typedArray instanceof V8Array) { typedArray.release(); } } case V8Value.V8_ARRAY: V8Array array = object.getArray(key); try { return toList(array, cache); } finally { if (array instanceof V8Array) { array.release(); } } case V8Value.V8_OBJECT: V8Object child = object.getObject(key); try { return toMap(child, cache); } finally { if (child instanceof V8Object) { child.release(); } } case V8Value.NULL: return null; case V8Value.UNDEFINED: return V8.getUndefined(); default: throw new IllegalStateException("Cannot find type for key: " + key); } } #location 17 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public boolean enableDebugSupport(final int port, final boolean waitForConnection) { V8.checkDebugThread(); debugEnabled = enableDebugSupport(getV8RuntimePtr(), port, waitForConnection); return debugEnabled; }
#vulnerable code public boolean enableDebugSupport(final int port, final boolean waitForConnection) { V8.checkDebugThread(); debugEnabled = enableDebugSupport(getHandle(), port, waitForConnection); return debugEnabled; } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public CommandLine parse(final String[] args) throws ParseException, IllegalArgumentException { final CommandLine cmd = super.parse(args); final String[] files = cmd.getArgs(); if (files.length > 1) { throw new IllegalArgumentException("Only one dump file path may be passed at a time."); } final Report report = ReportFactory.createReport(cmd, ReportType.REDIS); final OutputStream output; // Write to stdout if no path is specified. if (0 == files.length) { logger.info("No path given. Writing to standard output."); output = System.out; } else { try { output = new FileOutputStream(files[0]); } catch (FileNotFoundException e) { throw new RuntimeException("Unable to open dump file for writing.", e); } } ExtractionResult match = null; if (cmd.hasOption("reporter-status")) { match = ExtractionResult.get(((Number) cmd.getParsedOptionValue("reporter-status"))); if (null == match) { throw new IllegalArgumentException(String.format("%s is not a valid report status.", cmd.getOptionValue("reporter-status"))); } } final ProgressBar progressBar = ConsoleProgressBar.on(System.err) .withFormat("[:bar] :percent% :elapsed/:total ETA: :eta") .withTotalSteps(report.size()); final ObjectMapper mapper = new ObjectMapper(); final SimpleModule module = new SimpleModule(); module.addSerializer(Report.class, new ReportSerializer(progressBar, match)); mapper.registerModule(module); try ( final JsonGenerator jsonGenerator = new JsonFactory() .setCodec(mapper) .createGenerator(output, JsonEncoding.UTF8) ) { jsonGenerator.useDefaultPrettyPrinter(); jsonGenerator.writeObject(report); jsonGenerator.writeRaw('\n'); } catch (IOException e) { throw new RuntimeException("Unable to output JSON.", e); } try { report.close(); } catch (IOException e) { throw new RuntimeException("Exception while closing report.", e); } return cmd; }
#vulnerable code public CommandLine parse(final String[] args) throws ParseException, IllegalArgumentException { final CommandLine cmd = super.parse(args); final Report report = ReportFactory.createReport(cmd, ReportType.REDIS); ExtractionResult match = null; if (cmd.hasOption("reporter-status")) { match = ExtractionResult.get(((Number) cmd.getParsedOptionValue("reporter-status"))); if (null == match) { throw new IllegalArgumentException(String.format("%s is not a valid report status.", cmd.getOptionValue("reporter-status"))); } } final ProgressBar progressBar = ConsoleProgressBar.on(System.err) .withFormat("[:bar] :percent% :elapsed/:total ETA: :eta") .withTotalSteps(report.size()); final ObjectMapper mapper = new ObjectMapper(); final SimpleModule module = new SimpleModule(); module.addSerializer(Report.class, new ReportSerializer(progressBar, match)); mapper.registerModule(module); try ( final JsonGenerator jsonGenerator = new JsonFactory() .setCodec(mapper) .createGenerator(System.out, JsonEncoding.UTF8) ) { jsonGenerator.useDefaultPrettyPrinter(); jsonGenerator.writeObject(report); jsonGenerator.writeRaw('\n'); } catch (IOException e) { throw new RuntimeException("Unable to output JSON.", e); } try { report.close(); } catch (IOException e) { throw new RuntimeException("Exception while closing report.", e); } return cmd; } #location 18 #vulnerability type NULL_DEREFERENCE
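The parse entry guards against a missing dump-file argument by falling back to standard output instead of indexing into an empty argument array. A condensed sketch of just that output-selection step, with hypothetical names, is:

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStream;

// Choose the output stream: stdout when no path is supplied, otherwise the named file.
final class OutputSelector {
    static OutputStream select(String[] paths) {
        if ( paths.length > 1 ) {
            throw new IllegalArgumentException( "Only one dump file path may be passed at a time." );
        }
        if ( paths.length == 0 ) {
            return System.out; // no path given: write to standard output
        }
        try {
            return new FileOutputStream( paths[0] );
        }
        catch ( FileNotFoundException e ) {
            throw new RuntimeException( "Unable to open dump file for writing.", e );
        }
    }
}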
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{ ServerSocket serverSocket = new ServerSocket(0); final int availablePort = serverSocket.getLocalPort(); serverSocket.close(); //Creates non-ssl server. createServer(availablePort); BigtableOptions bigtableOptions = BigtableOptions.builder() .setDataHost("localhost") .setAdminHost("localhost") .setProjectId(TEST_PROJECT_ID) .setInstanceId(TEST_INSTANCE_ID) .setUserAgent(TEST_USER_AGENT) .setUsePlaintextNegotiation(true) .setCredentialOptions(CredentialOptions.nullCredential()) .setPort(availablePort) .build(); xGoogApiPattern = Pattern.compile(".* cbt/.*"); try (BigtableSession session = new BigtableSession(bigtableOptions)) { session.getClientWrapper() .readFlatRows(Query.create("fake-table")).next(); Assert.assertTrue(serverPasses.get()); } }
#vulnerable code @Test public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{ ServerSocket serverSocket = new ServerSocket(0); final int availablePort = serverSocket.getLocalPort(); serverSocket.close(); //Creates non-ssl server. createServer(availablePort); BigtableOptions bigtableOptions = BigtableOptions.builder() .setDataHost("localhost") .setAdminHost("localhost") .setProjectId(TEST_PROJECT_ID) .setInstanceId(TEST_INSTANCE_ID) .setUserAgent(TEST_USER_AGENT) .setUsePlaintextNegotiation(true) .setCredentialOptions(CredentialOptions.nullCredential()) .setPort(availablePort) .build(); xGoogApiPattern = Pattern.compile(".* cbt/.*"); try (BigtableSession session = new BigtableSession(bigtableOptions)) { session.getDataClient() .readFlatRows(ReadRowsRequest.getDefaultInstance()).next(); Assert.assertTrue(serverPasses.get()); } } #location 25 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testRetyableCheckAndMutateRow() throws Exception { final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance(); when(mockFuture.get()).thenReturn(CheckAndMutateRowResponse.getDefaultInstance()); underTest.checkAndMutateRow(request); verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request)); }
#vulnerable code @Test public void testRetyableCheckAndMutateRow() throws InterruptedException { final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance(); final AtomicBoolean done = new AtomicBoolean(false); executor.submit(new Callable<Void>(){ @Override public Void call() throws Exception { underTest.checkAndMutateRow(request); done.set(true); synchronized (done) { done.notify(); } return null; } }); Thread.sleep(100); future.set(CheckAndMutateRowResponse.getDefaultInstance()); synchronized (done) { done.wait(1000); } assertTrue(done.get()); verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request)); } #location 17 #vulnerability type THREAD_SAFETY_VIOLATION
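The vulnerable test above coordinates threads with Thread.sleep(100) plus wait/notify on a shared flag, which is inherently racy; the project's fix sidesteps the race by stubbing the future instead. Where real cross-thread coordination is still needed in a test, a CountDownLatch with a timeout is a common alternative; the sketch below is illustrative only and uses hypothetical names:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

final class LatchBasedCoordination {
    // Runs blockingCall on a worker thread and waits up to one second for it to finish,
    // with no Thread.sleep(..) and no shared mutable flags.
    static boolean runAndAwait(Runnable blockingCall) throws InterruptedException {
        CountDownLatch done = new CountDownLatch( 1 );
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit( () -> {
            blockingCall.run();   // e.g. the call under test
            done.countDown();     // signal completion
        } );
        try {
            return done.await( 1, TimeUnit.SECONDS );
        }
        finally {
            executor.shutdownNow();
        }
    }
}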
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test /** * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception, TimeoutException { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTime(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new String(""); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get())); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get())); // Check to make sure we're no longer refreshing. Assert.assertFalse(underTest.isRefreshing.get()); // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing.get()); }
#vulnerable code @Test /** * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception, TimeoutException { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTime(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new String(""); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get())); Future<Void> future = executorService.submit(syncRefreshCallable); // let the Thread running syncRefreshCallable() have a turn so that it can initiate the call // to refreshAccessToken(). Thread.yield(); synchronized(lock) { lock.notifyAll(); } // Try to get the access token, which should be calculated at this point. There's // a possibility that some hanging occurs in the test code. If the operation times out // so timeout after 1 second, this will throw a TimeoutException. future.get(1, TimeUnit.SECONDS); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get())); // Check to make sure we're no longer refreshing. Assert.assertFalse(underTest.isRefreshing.get()); // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); future = executorService.submit(syncRefreshCallable); // Let the asyncRefreshes do their thing. 
Thread.yield(); // There should be a single thread kicked off by the underTest.asyncRefresh() calls about // actually doing a refresh at this point; the other ones will have see that a refresh is in // progress and finish the invocation of the Thread without performing a refres().. Make sure // that at least 1 refresh process is in progress. Assert.assertTrue(underTest.isRefreshing.get()); synchronized(lock) { // Release the lock so that all of the async refreshing can complete. lock.notifyAll(); } // Wait for no more than a second to make sure that the call to underTest.syncRefresh() // completes properly. If a second passes without syncRefresh() completing, future.get(..) // will throw a TimeoutException. future.get(1, TimeUnit.SECONDS); Assert.assertFalse(underTest.isRefreshing.get()); } #location 79 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testStaleAndExpired() throws IOException { long expiration = HeaderCacheElement.TOKEN_STALENESS_MS + 1; initialize(expiration); Assert.assertEquals(CacheState.Good, underTest.getCacheState()); long startTime = 2L; setTimeInMillieconds(startTime); Assert.assertEquals(CacheState.Stale, underTest.getCacheState()); long expiredStaleDiff = HeaderCacheElement.TOKEN_STALENESS_MS - HeaderCacheElement.TOKEN_EXPIRES_MS; setTimeInMillieconds(startTime + expiredStaleDiff); Assert.assertEquals(CacheState.Expired, underTest.getCacheState()); }
#vulnerable code @Test public void testStaleAndExpired() throws IOException { long expiration = HeaderCacheElement.TOKEN_STALENESS_MS + 1; initialize(expiration); Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState()); long startTime = 2L; setTimeInMillieconds(startTime); Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); long expiredStaleDiff = HeaderCacheElement.TOKEN_STALENESS_MS - HeaderCacheElement.TOKEN_EXPIRES_MS; setTimeInMillieconds(startTime + expiredStaleDiff); Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); } #location 5 #vulnerability type UNSAFE_GUARDED_BY_ACCESS
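The pair above fixes the test by reading the cache state through `underTest.getCacheState()` instead of reaching into the `headerCache` field that the interceptor guards with its own lock. A minimal sketch of that shape, with a guarded field exposed only through synchronized accessors; the class, enum, and method names here are illustrative and not taken from the client library:

// Illustrative only: a guarded field exposed through synchronized accessors
// so callers (including tests) never read the field without the lock.
final class TokenCacheOwner {
  enum CacheState { GOOD, STALE, EXPIRED }

  private final Object lock = new Object();
  private CacheState cacheState = CacheState.EXPIRED; // guarded by "lock"

  CacheState getCacheState() {
    synchronized (lock) {
      return cacheState;
    }
  }

  void setCacheState(CacheState newState) {
    synchronized (lock) {
      cacheState = newState;
    }
  }
}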
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void onClose(Status status, Metadata trailers) { if (status.getCode() == Status.Code.CANCELLED && status.getDescription() != null && status.getDescription().contains(TIMEOUT_CANCEL_MSG)) { // If this was canceled because of handleTimeout(). The cancel is immediately retried or // completed in another fashion. return; } super.onClose(status, trailers); }
#vulnerable code @Override public void onClose(Status status, Metadata trailers) { if (status.getCode() == Status.Code.CANCELLED && status.getDescription().contains(TIMEOUT_CANCEL_MSG)) { // If this was canceled because of handleTimeout(). The cancel is immediately retried or // completed in another fashion. return; } super.onClose(status, trailers); } #location 4 #vulnerability type NULL_DEREFERENCE
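The fix above checks `status.getDescription() != null` before calling `contains()`, since the description of a cancelled status may be absent. A generic sketch of the same guard, assuming a nullable description; `RpcStatus` and `CloseHandler` are stand-ins invented for illustration, not the real gRPC types:

// Illustrative stand-in for a status object whose description may be null.
final class RpcStatus {
  private final String description; // may be null

  RpcStatus(String description) {
    this.description = description;
  }

  String getDescription() {
    return description;
  }
}

final class CloseHandler {
  private static final String TIMEOUT_CANCEL_MSG = "timeout cancel";

  // Guard the nullable description before dereferencing it.
  boolean isTimeoutCancel(RpcStatus status) {
    String description = status.getDescription();
    return description != null && description.contains(TIMEOUT_CANCEL_MSG);
  }
}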
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. Assert.assertFalse(underTest.isRefreshing()); // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing()); }
#vulnerable code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); underTest.rateLimiter.setRate(100000); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. synchronized (underTest.lock) { Assert.assertFalse(underTest.isRefreshing); } // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing); } #location 58 #vulnerability type UNSAFE_GUARDED_BY_ACCESS
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testSyncRefresh() throws IOException { initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1); Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState()); Assert.assertFalse(underTest.isRefreshing()); }
#vulnerable code @Test public void testSyncRefresh() throws IOException { initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1); Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState()); } #location 4 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void setMessageCompression(boolean enable) { throw new UnsupportedOperationException("setMessageCompression()"); }
#vulnerable code @Override public void setMessageCompression(boolean enable) { call.setMessageCompression(enable); } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void modifyTable(TableName tableName, TableDescriptor tableDescriptor) throws IOException { super.modifyTable(tableName, new HTableDescriptor(tableDescriptor)); }
#vulnerable code @Override public void modifyTable(TableName tableName, TableDescriptor tableDescriptor) throws IOException { if (isTableAvailable(tableName)) { TableDescriptor currentTableDescriptor = getTableDescriptor(tableName); List<Modification> modifications = new ArrayList<>(); List<HColumnDescriptor> columnDescriptors = tableAdapter2x.toHColumnDescriptors(tableDescriptor); List<HColumnDescriptor> currentColumnDescriptors = tableAdapter2x.toHColumnDescriptors(currentTableDescriptor); modifications.addAll(tableModificationAdapter.buildModifications(columnDescriptors, currentColumnDescriptors)); modifyColumn(tableName, "modifyTable", "update", (Modification[]) modifications.toArray()); } else { throw new TableNotFoundException(tableName); } } #location 7 #vulnerability type NULL_DEREFERENCE
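In the vulnerable version the result of `getTableDescriptor(tableName)` can be null yet is used to build the column diff; the fix avoids the manual diff entirely by delegating to the superclass. Where a manual lookup is still needed, validating the result before use is the usual guard; a tiny hedged sketch in which the helper and message are hypothetical:

import java.util.Objects;

final class DescriptorLookup {
  // Illustrative: validate a possibly-null lookup result before building on it.
  static <T> T requireFound(T descriptor, String tableName) {
    return Objects.requireNonNull(
        descriptor, "No descriptor found for table: " + tableName);
  }
}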
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void onClose(Status status, Metadata trailers) { synchronized (callLock) { call = null; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { operationTimerContext.close(); } } else { onError(status, trailers); } }
#vulnerable code @Override public void onClose(Status status, Metadata trailers) { synchronized (callLock) { call = null; } rpcTimerContext.close(); Status.Code code = status.getCode(); // OK if (code == Status.Code.OK) { if (onOK()) { operationTimerContext.close(); } return; } // CANCELLED if (code == Status.Code.CANCELLED) { // An explicit user cancellation is not considered a failure. operationTimerContext.close(); return; } // Non retry scenario if (!retryOptions.enableRetries() || !retryOptions.isRetryable(code) // Unauthenticated is special because the request never made it to // to the server, so all requests are retryable || !(isRequestRetryable() || code == Code.UNAUTHENTICATED)) { rpc.getRpcMetrics().markFailure(); operationTimerContext.close(); setException(status.asRuntimeException()); return; } // Attempt retry with backoff long nextBackOff = getNextBackoff(); failedCount += 1; // Backoffs timed out. if (nextBackOff == BackOff.STOP) { rpc.getRpcMetrics().markRetriesExhasted(); operationTimerContext.close(); String message = String.format("Exhausted retries after %d failures.", failedCount); StatusRuntimeException cause = status.asRuntimeException(); setException(new BigtableRetriesExhaustedException(message, cause)); return; } else { String channelId = ChannelPool.extractIdentifier(trailers); LOG.info("Retrying failed call. Failure #%d, got: %s on channel %s", status.getCause(), failedCount, status, channelId); } performRetry(nextBackOff); } #location 39 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void retryOnTimeout(ScanTimeoutException rte) throws BigtableRetriesExhaustedException { LOG.info("The client could not get a response in %d ms. Retrying the scan.", retryOptions.getReadPartialRowTimeoutMillis()); // Cancel the existing rpc. cancel(TIMEOUT_CANCEL_MSG); rpcTimerContext.close(); failedCount++; // Can this request be retried int maxRetries = retryOptions.getMaxScanTimeoutRetries(); if (retryOptions.enableRetries() && ++timeoutRetryCount <= maxRetries) { resetStatusBasedBackoff(); performRetry(0); } else { throw getExhaustedRetriesException(Status.ABORTED); } }
#vulnerable code private void retryOnTimeout(ScanTimeoutException rte) throws BigtableRetriesExhaustedException { LOG.info("The client could not get a response in %d ms. Retrying the scan.", retryOptions.getReadPartialRowTimeoutMillis()); // Cancel the existing rpc. cancel(TIMEOUT_CANCEL_MSG); rpcTimerContext.close(); failedCount++; // Can this request be retried int maxRetries = retryOptions.getMaxScanTimeoutRetries(); if (retryOptions.enableRetries() && ++timeoutRetryCount <= maxRetries) { rpc.getRpcMetrics().markRetry(); resetStatusBasedBackoff(); run(); } else { throw getExhaustedRetriesException(Status.ABORTED); } } #location 15 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void onClose(Status status, Metadata trailers) { try (Scope scope = TRACER.withSpan(operationSpan)) { callWrapper.resetCall(); rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { finalizeStats(status); } } else { onError(status, trailers); } } catch (Exception e) { setException(e); } }
#vulnerable code @Override public void onClose(Status status, Metadata trailers) { try (Scope scope = TRACER.withSpan(operationSpan)) { synchronized (callLock) { call = NULL_CALL; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { finalizeStats(status); } } else { onError(status, trailers); } } catch (Exception e) { setException(e); } } #location 7 #vulnerability type THREAD_SAFETY_VIOLATION
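The fixed `onClose` above no longer assigns the shared `call` field itself; it delegates to `callWrapper.resetCall()`, so one object owns both the reference and its lock. A minimal sketch of such a wrapper, assuming only reset and cancel are needed; the `Cancellable` interface and the field names are invented for illustration, not the library's actual types:

// Illustrative wrapper that owns the lock for a mutable, cancellable call.
final class CallWrapper {
  interface Cancellable {
    void cancel(String message, Throwable cause);
  }

  private final Object lock = new Object();
  private Cancellable call; // guarded by "lock"

  void setCall(Cancellable newCall) {
    synchronized (lock) {
      call = newCall;
    }
  }

  void resetCall() {
    synchronized (lock) {
      call = null;
    }
  }

  void cancel(String message, Throwable cause) {
    synchronized (lock) {
      if (call != null) {
        call.cancel(message, cause);
      }
    }
  }
}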
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static Credentials getCredentials(CredentialOptions options) throws IOException, GeneralSecurityException { return patchCredentials(getCredentialsInner(options)); }
#vulnerable code public static Credentials getCredentials(CredentialOptions options) throws IOException, GeneralSecurityException { switch (options.getCredentialType()) { case DefaultCredentials: return getApplicationDefaultCredential(); case P12: P12CredentialOptions p12Options = (P12CredentialOptions) options; return getCredentialFromPrivateKeyServiceAccount( p12Options.getServiceAccount(), p12Options.getKeyFile()); case SuppliedCredentials: return ((UserSuppliedCredentialOptions) options).getCredential(); case SuppliedJson: JsonCredentialsOptions jsonCredentialsOptions = (JsonCredentialsOptions) options; synchronized (jsonCredentialsOptions) { if (jsonCredentialsOptions.getCachedCredentials() == null) { jsonCredentialsOptions.setCachedCredentails( getInputStreamCredential(jsonCredentialsOptions.getInputStream())); } return jsonCredentialsOptions.getCachedCredentials(); } case None: return null; default: throw new IllegalStateException( "Cannot process Credential type: " + options.getCredentialType()); } } #location 5 #vulnerability type THREAD_SAFETY_VIOLATION
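The vulnerable version lazily caches parsed JSON credentials by synchronizing on the options object and checking the cached field inline; the fix routes every case through a single helper. Independent of that refactor, a common way to keep a lazy cache race-free is to guard both the check and the write with one private lock, as in this generic sketch (not the client library's actual implementation; the loader type is hypothetical):

import java.io.IOException;
import java.io.InputStream;

// Illustrative lazy cache guarded by a single private lock.
final class CachedCredentialsHolder<T> {
  interface Loader<T> {
    T load(InputStream in) throws IOException;
  }

  private final Object lock = new Object();
  private final InputStream source;
  private final Loader<T> loader;
  private T cached; // guarded by "lock"

  CachedCredentialsHolder(InputStream source, Loader<T> loader) {
    this.source = source;
    this.loader = loader;
  }

  T get() throws IOException {
    synchronized (lock) {
      if (cached == null) {
        cached = loader.load(source);
      }
      return cached;
    }
  }
}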
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @SuppressWarnings("unchecked") @Override public void run() { try { // restart the clock. synchronized (callLock) { super.run(); // pre-fetch one more result, for performance reasons. adapter.request(1); if (rowObserver instanceof ClientResponseObserver) { ((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter); } lastResponseMs = clock.currentTimeMillis(); } } catch (Exception e) { setException(e); } }
#vulnerable code @SuppressWarnings("unchecked") @Override public void run() { try { // restart the clock. this.rowMerger = new RowMerger(rowObserver); adapter = new CallToStreamObserverAdapter(); synchronized (callLock) { super.run(); // pre-fetch one more result, for performance reasons. adapter.request(1); if (rowObserver instanceof ClientResponseObserver) { ((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter); } lastResponseMs = clock.currentTimeMillis(); } } catch (Exception e) { setException(e); } } #location 6 #vulnerability type THREAD_SAFETY_VIOLATION
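The fix above stops reassigning `rowMerger` and the adapter inside `run()` altogether, so those shared collaborators are no longer rebuilt outside the `callLock` block. One way to keep such collaborators race-free when they must be (re)assigned is to do the assignment under the same lock their readers take; a small sketch with stand-in types and hypothetical field names:

// Illustrative: assign shared collaborators only while holding the lock
// that their readers also use; stand-in types, not the library's classes.
final class StreamingCallRunner {
  static final class RowMerger {}
  static final class Adapter {}

  private final Object callLock = new Object();
  private RowMerger rowMerger; // guarded by "callLock"
  private Adapter adapter;     // guarded by "callLock"

  void run() {
    synchronized (callLock) {
      rowMerger = new RowMerger();
      adapter = new Adapter();
      // ... start the underlying call and request the first response here ...
    }
  }
}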
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testBatchBulkGets() throws Exception { final List<Get> gets = new ArrayList<>(10); final List<ApiFuture<Result>> expected = new ArrayList<>(10); gets.add(new Get(Bytes.toBytes("key0"))); expected.add(ApiFutures.<Result>immediateFuture(null)); for (int i = 1; i < 10; i++) { byte[] row_key = randomBytes(8); gets.add(new Get(row_key)); ByteString key = ByteStringer.wrap(row_key); ByteString cellValue = ByteString.copyFrom(randomBytes(8)); expected.add( ApiFutures.immediateFuture( Result.create( ImmutableList.<Cell>of( new RowCell( key.toByteArray(), Bytes.toBytes("family"), Bytes.toBytes(""), System.nanoTime() / 1000, cellValue.toByteArray()))))); } // Test 10 gets, but return only 9 to test the row not found case. when(mockBulkRead.add(any(Query.class))) .then( new Answer<ApiFuture<Result>>() { final AtomicInteger counter = new AtomicInteger(); @Override public ApiFuture<Result> answer(InvocationOnMock invocation) throws Throwable { return expected.get(counter.getAndIncrement()); } }); ByteString key = ByteStringer.wrap(randomBytes(8)); ByteString cellValue = ByteString.copyFrom(randomBytes(8)); Result row = Result.create( ImmutableList.<Cell>of( new RowCell( key.toByteArray(), Bytes.toBytes("family"), Bytes.toBytes(""), 1000L, cellValue.toByteArray()))); when(mockFuture.get()).thenReturn(row); Result[] results = createExecutor().batch(gets); verify(mockBulkRead, times(10)).add(any(Query.class)); verify(mockBulkRead, times(1)).flush(); assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0])); for (int i = 1; i < results.length; i++) { assertTrue( "Expected " + Bytes.toString(gets.get(i).getRow()) + " but was " + Bytes.toString(results[i].getRow()), Bytes.equals(results[i].getRow(), gets.get(i).getRow())); } }
#vulnerable code @Test public void testBatchBulkGets() throws Exception { final List<Get> gets = new ArrayList<>(10); final List<ApiFuture<FlatRow>> expected = new ArrayList<>(10); gets.add(new Get(Bytes.toBytes("key0"))); expected.add(ApiFutures.<FlatRow>immediateFuture(null)); for (int i = 1; i < 10; i++) { byte[] row_key = randomBytes(8); gets.add(new Get(row_key)); ByteString key = ByteStringer.wrap(row_key); ByteString cellValue = ByteString.copyFrom(randomBytes(8)); expected.add( ApiFutures.immediateFuture( FlatRow.newBuilder() .withRowKey(key) .addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue) .build())); } // Test 10 gets, but return only 9 to test the row not found case. when(mockBulkRead.add(any(Query.class))) .then( new Answer<ApiFuture<FlatRow>>() { final AtomicInteger counter = new AtomicInteger(); @Override public ApiFuture<FlatRow> answer(InvocationOnMock invocation) throws Throwable { return expected.get(counter.getAndIncrement()); } }); ByteString key = ByteStringer.wrap(randomBytes(8)); ByteString cellValue = ByteString.copyFrom(randomBytes(8)); FlatRow row = FlatRow.newBuilder() .withRowKey(key) .addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue) .build(); when(mockFuture.get()).thenReturn(row); Result[] results = createExecutor().batch(gets); verify(mockBulkRead, times(10)).add(any(Query.class)); verify(mockBulkRead, times(1)).flush(); Assert.assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0])); for (int i = 1; i < results.length; i++) { Assert.assertTrue( "Expected " + Bytes.toString(gets.get(i).getRow()) + " but was " + Bytes.toString(results[i].getRow()), Bytes.equals(results[i].getRow(), gets.get(i).getRow())); } } #location 41 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testPartialResults() throws Exception { when(mockBigtableApi.getDataClient()).thenReturn(mockDataClientWrapper); when(mockDataClientWrapper.createBulkRead(isA(String.class))).thenReturn(mockBulkRead); byte[] key1 = randomBytes(8); byte[] key2 = randomBytes(8); Result expected = Result.create( ImmutableList.<org.apache.hadoop.hbase.Cell>of( new RowCell( key1, Bytes.toBytes("cf"), Bytes.toBytes(""), 10, Bytes.toBytes("hi!"), ImmutableList.<String>of()))); RuntimeException exception = new RuntimeException("Something bad happened"); when(mockBulkRead.add(any(Query.class))) .thenReturn(ApiFutures.immediateFuture(expected)) .thenReturn(ApiFutures.<Result>immediateFailedFuture(exception)); List<Get> gets = Arrays.asList(new Get(key1), new Get(key2)); Object[] results = new Object[2]; try { createExecutor().batch(gets, results); } catch (RetriesExhaustedWithDetailsException ignored) { } assertTrue("first result is a result", results[0] instanceof Result); assertTrue(matchesRow(expected).matches(results[0])); Assert.assertEquals(exception, results[1]); }
#vulnerable code @Test public void testPartialResults() throws Exception { when(mockBigtableApi.getDataClient()).thenReturn(mockDataClientWrapper); when(mockDataClientWrapper.createBulkRead(isA(String.class))).thenReturn(mockBulkRead); byte[] key1 = randomBytes(8); byte[] key2 = randomBytes(8); Result result = Result.create( ImmutableList.<org.apache.hadoop.hbase.Cell>of( new RowCell( key1, "cf".getBytes(), "".getBytes(), 10, "hi!".getBytes(), ImmutableList.<String>of()))); RuntimeException exception = new RuntimeException("Something bad happened"); when(mockBulkRead.add(any(Query.class))) .thenReturn(ApiFutures.immediateFuture(result)) .thenReturn(ApiFutures.<Result>immediateFailedFuture(exception)); List<Get> gets = Arrays.asList(new Get(key1), new Get(key2)); Object[] results = new Object[2]; try { createExecutor().batch(gets, results); } catch (RetriesExhaustedWithDetailsException ignored) { } Assert.assertTrue("first result is a result", results[0] instanceof Result); Assert.assertArrayEquals(key1, ((Result) results[0]).getRow()); Assert.assertEquals(exception, results[1]); } #location 26 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{ ServerSocket serverSocket = new ServerSocket(0); final int availablePort = serverSocket.getLocalPort(); serverSocket.close(); //Creates non-ssl server. createServer(availablePort); BigtableOptions bigtableOptions = BigtableOptions.builder() .setDataHost("localhost") .setAdminHost("localhost") .setProjectId(TEST_PROJECT_ID) .setInstanceId(TEST_INSTANCE_ID) .setUserAgent(TEST_USER_AGENT) .setUsePlaintextNegotiation(true) .setCredentialOptions(CredentialOptions.nullCredential()) .setPort(availablePort) .build(); xGoogApiPattern = Pattern.compile(".* cbt/.*"); try (BigtableSession session = new BigtableSession(bigtableOptions)) { session.getDataClient() .readFlatRows(ReadRowsRequest.getDefaultInstance()).next(); Assert.assertTrue(serverPasses.get()); } }
#vulnerable code @Test public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{ ServerSocket serverSocket = new ServerSocket(0); final int availablePort = serverSocket.getLocalPort(); serverSocket.close(); //Creates non-ssl server. createServer(availablePort); BigtableOptions bigtableOptions = BigtableOptions.builder() .setDataHost("localhost") .setAdminHost("localhost") .setProjectId(TEST_PROJECT_ID) .setInstanceId(TEST_INSTANCE_ID) .setUserAgent(TEST_USER_AGENT) .setUsePlaintextNegotiation(true) .setCredentialOptions(CredentialOptions.nullCredential()) .setPort(availablePort) .build(); xGoogApiPattern = Pattern.compile(".* cbt/.*"); new BigtableSession(bigtableOptions).getDataClient() .readFlatRows(ReadRowsRequest.getDefaultInstance()).next(); Assert.assertTrue(serverPasses.get()); } #location 23 #vulnerability type RESOURCE_LEAK
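The fix above opens the `BigtableSession` in a try-with-resources block, so the underlying channels are released even if the read or the assertion throws. The same shape works for any `AutoCloseable`; a self-contained sketch with an invented `Session` type:

// Illustrative try-with-resources around an AutoCloseable session-like object.
final class SessionExample {
  static final class Session implements AutoCloseable {
    String read() {
      return "row";
    }

    @Override
    public void close() {
      // release sockets, channels, etc.
    }
  }

  static String readOneRow() {
    try (Session session = new Session()) {
      return session.read(); // session.close() runs even if read() throws
    }
  }
}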
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Result[] batch(final List<? extends org.apache.hadoop.hbase.client.Row> actions) throws Exception { return createExecutor().batch(actions); }
#vulnerable code private Result[] batch(final List<? extends org.apache.hadoop.hbase.client.Row> actions) throws Exception { return createExecutor(options).batch(actions); } #location 3 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testGetCallback() throws Exception { when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture); byte[] key = randomBytes(8); FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build(); setFuture(ImmutableList.of(response)); final Callback<Result> callback = Mockito.mock(Callback.class); List<Get> gets = Arrays.asList(new Get(key)); createExecutor().batchCallback(gets, new Object[1], callback); verify(callback, times(1)) .update( same(BatchExecutor.NO_REGION), same(key), argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response)))); }
#vulnerable code @Test public void testGetCallback() throws Exception { when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture); byte[] key = randomBytes(8); FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build(); setFuture(ImmutableList.of(response)); final Callback<Result> callback = Mockito.mock(Callback.class); List<Get> gets = Arrays.asList(new Get(key)); createExecutor(options).batchCallback(gets, new Object[1], callback); verify(callback, times(1)) .update( same(BatchExecutor.NO_REGION), same(key), argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response)))); } #location 9 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override protected boolean onOK(Metadata trailers) { ProcessingStatus status = requestManager.onOK(); if (status == ProcessingStatus.INVALID) { // Set an exception. onError(INVALID_RESPONSE, trailers); return true; } // There was a problem in the data found in onMessage(), so fail the RPC. if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) { // Set the response, with either success, or non-retryable responses. completionFuture.set(Arrays.asList(requestManager.buildResponse())); return true; } // Perform a partial retry, if the backoff policy allows it. Long nextBackOff = getNextBackoff(); if (nextBackOff == null) { // Return the response as is, and don't retry; rpc.getRpcMetrics().markRetriesExhasted(); completionFuture.set(Arrays.asList(requestManager.buildResponse())); operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount", AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount()))); return true; } performRetry(nextBackOff); operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount", AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount()))); return false; }
#vulnerable code @Override protected boolean onOK(Metadata trailers) { ProcessingStatus status = requestManager.onOK(); if (status == ProcessingStatus.INVALID) { // Set an exception. onError(INVALID_RESPONSE, trailers); return true; } // There was a problem in the data found in onMessage(), so fail the RPC. if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) { // Set the response, with either success, or non-retryable responses. completionFuture.set(Arrays.asList(requestManager.buildResponse())); return true; } // Perform a partial retry, if the backoff policy allows it. long nextBackOff = getNextBackoff(); if (nextBackOff == BackOff.STOP) { // Return the response as is, and don't retry; rpc.getRpcMetrics().markRetriesExhasted(); completionFuture.set(Arrays.asList(requestManager.buildResponse())); operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount", AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount()))); return true; } performRetry(nextBackOff); operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount", AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount()))); return false; } #location 19 #vulnerability type NULL_DEREFERENCE
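The fix above switches the backoff to a boxed `Long` and treats `null` as the retries-exhausted signal, checking for it before any further use rather than comparing a primitive against a sentinel. A minimal sketch of that calling convention; names and values are illustrative:

// Illustrative: a nullable boxed backoff where null means "stop retrying".
final class BackoffExample {
  private long attempt = 0;

  // Returns the next backoff in milliseconds, or null when retries are exhausted.
  Long getNextBackoff(long maxAttempts) {
    attempt++;
    return attempt > maxAttempts ? null : attempt * 100L;
  }

  boolean shouldRetry(long maxAttempts) {
    Long nextBackoffMs = getNextBackoff(maxAttempts);
    if (nextBackoffMs == null) {
      return false; // exhausted; surface the failure instead of retrying
    }
    // safe to unbox/use nextBackoffMs here
    return nextBackoffMs >= 0;
  }
}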
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. Assert.assertFalse(underTest.isRefreshing()); // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing()); }
#vulnerable code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); underTest.rateLimiter.setRate(100000); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. synchronized (underTest.lock) { Assert.assertFalse(underTest.isRefreshing); } // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing); } #location 28 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void onClose(Status status, Metadata trailers) { try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) { synchronized (callLock) { call = null; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { finalizeStats(status); } } else { onError(status, trailers); } } }
#vulnerable code @Override public void onClose(Status status, Metadata trailers) { synchronized (callLock) { call = null; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { operationTimerContext.close(); } } else { onError(status, trailers); } } #location 14 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testRetyableMutateRow() throws Exception { final MutateRowRequest request = MutateRowRequest.getDefaultInstance(); when(mockFuture.get()).thenReturn(Empty.getDefaultInstance()); underTest.mutateRow(request); verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request)); }
#vulnerable code @Test public void testRetyableMutateRow() throws InterruptedException { final MutateRowRequest request = MutateRowRequest.getDefaultInstance(); final AtomicBoolean done = new AtomicBoolean(false); executor.submit(new Callable<Void>(){ @Override public Void call() throws Exception { underTest.mutateRow(request); done.set(true); synchronized (done) { done.notify(); } return null; } }); Thread.sleep(100); future.set(MutateRowsResponse.getDefaultInstance()); synchronized (done) { done.wait(1000); } assertTrue(done.get()); verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request)); } #location 5 #vulnerability type THREAD_SAFETY_VIOLATION
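The fixed test above stubs `mockFuture.get()` so `mutateRow` completes on the calling thread, removing the `Thread.sleep`, `AtomicBoolean`, and `wait`/`notify` choreography. When a test genuinely needs a background thread, a `CountDownLatch` with a bounded `await` gives the same determinism without sleeps; a generic sketch, not tied to the Bigtable client:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Illustrative: wait for background work with a latch instead of sleep/flags.
final class LatchExample {
  static boolean runAndWait() throws InterruptedException {
    final CountDownLatch done = new CountDownLatch(1);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    try {
      executor.submit(new Runnable() {
        @Override
        public void run() {
          // ... perform the call under test here ...
          done.countDown();
        }
      });
      return done.await(1, TimeUnit.SECONDS); // true only if the work finished
    } finally {
      executor.shutdownNow();
    }
  }
}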
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void setException(Exception exception) { rowMerger.onError(exception); super.setException(exception); }
#vulnerable code @Override public void setException(Exception exception) { rowObserver.onError(exception); // cleanup any state that was in RowMerger. There may be a partial row in progress which needs // to be reset. rowMerger = new RowMerger(rowObserver); super.setException(exception); } #location 6 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code protected void cancel(final String message) { callWrapper.cancel(message, null); }
#vulnerable code protected void cancel(final String message) { call.cancel(message, null); } #location 2 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static Credentials getCredentials(CredentialOptions options) throws IOException, GeneralSecurityException { return patchCredentials(getCredentialsInner(options)); }
#vulnerable code public static Credentials getCredentials(CredentialOptions options) throws IOException, GeneralSecurityException { switch (options.getCredentialType()) { case DefaultCredentials: return getApplicationDefaultCredential(); case P12: P12CredentialOptions p12Options = (P12CredentialOptions) options; return getCredentialFromPrivateKeyServiceAccount( p12Options.getServiceAccount(), p12Options.getKeyFile()); case SuppliedCredentials: return ((UserSuppliedCredentialOptions) options).getCredential(); case SuppliedJson: JsonCredentialsOptions jsonCredentialsOptions = (JsonCredentialsOptions) options; synchronized (jsonCredentialsOptions) { if (jsonCredentialsOptions.getCachedCredentials() == null) { jsonCredentialsOptions.setCachedCredentails( getInputStreamCredential(jsonCredentialsOptions.getInputStream())); } return jsonCredentialsOptions.getCachedCredentials(); } case None: return null; default: throw new IllegalStateException( "Cannot process Credential type: " + options.getCredentialType()); } } #location 17 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @SuppressWarnings("unchecked") @Override public void run() { try { // restart the clock. synchronized (callLock) { super.run(); // pre-fetch one more result, for performance reasons. adapter.request(1); if (rowObserver instanceof ClientResponseObserver) { ((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter); } lastResponseMs = clock.currentTimeMillis(); } } catch (Exception e) { setException(e); } }
#vulnerable code @SuppressWarnings("unchecked") @Override public void run() { try { // restart the clock. this.rowMerger = new RowMerger(rowObserver); adapter = new CallToStreamObserverAdapter(); synchronized (callLock) { super.run(); // pre-fetch one more result, for performance reasons. adapter.request(1); if (rowObserver instanceof ClientResponseObserver) { ((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter); } lastResponseMs = clock.currentTimeMillis(); } } catch (Exception e) { setException(e); } } #location 7 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void onClose(Status status, Metadata trailers) { try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) { synchronized (callLock) { call = null; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { finalizeStats(status); } } else { onError(status, trailers); } } }
#vulnerable code @Override public void onClose(Status status, Metadata trailers) { synchronized (callLock) { call = null; } rpcTimerContext.close(); // OK if (status.isOk()) { if (onOK(trailers)) { operationTimerContext.close(); } } else { onError(status, trailers); } } #location 11 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testRefreshAfterFailure() throws Exception { underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1)); //noinspection unchecked Mockito.when(credentials.refreshAccessToken()) // First call will throw Exception & bypass retries .thenThrow(new IOException()) // Second call will succeed .thenReturn(accessToken); // First call HeaderCacheElement firstResult = underTest.getHeaderSafe(); Assert.assertEquals(CacheState.Exception, firstResult.getCacheState()); // Now the second token should be available HeaderCacheElement secondResult = underTest.getHeaderSafe(); Assert.assertEquals(CacheState.Good, secondResult.getCacheState()); Assert.assertThat(secondResult.header, containsString("hi")); // Make sure that the token was only requested twice: once for the first failure & second time for background recovery Mockito.verify(credentials, times(2)).refreshAccessToken(); }
#vulnerable code @Test public void testRefreshAfterFailure() throws Exception { underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); underTest.rateLimiter.setRate(100000); final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1)); //noinspection unchecked Mockito.when(credentials.refreshAccessToken()) // First call will throw Exception & bypass retries .thenThrow(new IOException()) // Second call will succeed .thenReturn(accessToken); // First call HeaderCacheElement firstResult = underTest.getHeaderSafe(); Assert.assertEquals(CacheState.Exception, firstResult.getCacheState()); // Now the second token should be available HeaderCacheElement secondResult = underTest.getHeaderSafe(); Assert.assertEquals(CacheState.Good, secondResult.getCacheState()); Assert.assertThat(secondResult.header, containsString("hi")); // Make sure that the token was only requested twice: once for the first failure & second time for background recovery Mockito.verify(credentials, times(2)).refreshAccessToken(); } #location 20 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. Assert.assertFalse(underTest.isRefreshing()); // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing()); }
#vulnerable code @Test /* * Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh * logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it * triggers a call to syncRefresh() which potentially waits for refresh that was initiated * from another thread either through syncRefresh() or asyncRefresh(). This test case simulates * that condition. */ public void testRefreshDoesntHang() throws Exception { // Assume that the user starts at this time... it's an arbitrarily big number which will // assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not // be negative. long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10; setTimeInMillieconds(start); // RefreshingOAuth2CredentialsInterceptor will show that the access token is stale. final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1; // Create a mechanism that will allow us to control when the accessToken is returned. // credentials.refreshAccessToken() will get called asynchronously and will wait until the // lock is notified before returning. That will allow us to set up multiple concurrent calls final Object lock = new Object(); Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() { @Override public AccessToken answer(InvocationOnMock invocation) throws Throwable { synchronized (lock) { lock.wait(); } return new AccessToken("", new Date(expiration)); } }); // Force a synchronous refresh. This ought to wait until a refresh happening in another thread // completes. Callable<Void> syncRefreshCallable = new Callable<Void>() { @Override public Void call() throws Exception { underTest.syncRefresh(); return null; } }; underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials); underTest.rateLimiter.setRate(100000); // At this point, the access token wasn't retrieved yet. The // RefreshingOAuth2CredentialsInterceptor considers null to be Expired. Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState()); syncCall(lock, syncRefreshCallable); // Check to make sure that the AccessToken was retrieved. Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState()); // Check to make sure we're no longer refreshing. synchronized (underTest.lock) { Assert.assertFalse(underTest.isRefreshing); } // Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be // necessary, but also should not be harmful, since there are likely to be multiple concurrent // requests that call asyncRefresh() when the token turns stale. underTest.asyncRefresh(); underTest.asyncRefresh(); underTest.asyncRefresh(); syncCall(lock, syncRefreshCallable); Assert.assertFalse(underTest.isRefreshing); } #location 54 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
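Editor's note: one detail of the fixed run() above that is easy to miss in the flattened text is the PLACEHOLDER fallback: when the serialized environment list would exceed the configured maximum length, the XLEARNING_INPUT_FILE_LIST entry is dropped and the file list is spilled to a local inputFileList.txt instead. The sketch below isolates that pattern only; the class name, the ENV_MAX_LENGTH constant, and the buildEnv helper are simplified stand-ins for the XLearning configuration keys, not the project's actual API.

```java
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;

// Minimal sketch (assumed names) of the PLACEHOLDER fallback in run():
// if the environment would be too long, remove the INPUT_FILE_LIST entry
// and write it to a local file the training script can read instead.
public class EnvListFallback {
  // Stand-in for conf.getInt(XLEARNING_ENV_MAXLENGTH, DEFAULT_...).
  static final int ENV_MAX_LENGTH = 8192;

  static String[] buildEnv(List<String> envList, String inputFileList) throws Exception {
    envList.add("XLEARNING_INPUT_FILE_LIST=" + inputFileList);
    if (envList.toString().length() > ENV_MAX_LENGTH) {
      // Too long to pass through the process environment: drop the entry
      // and spill the list to the container working directory.
      envList.remove(envList.size() - 1);
      try (PrintWriter writer =
               new PrintWriter("inputFileList.txt", StandardCharsets.UTF_8.name())) {
        writer.println(inputFileList);
      }
    }
    return envList.toArray(new String[0]);
  }
}
```

Using try-with-resources here (instead of the explicit writer.close() seen in the dataset code) also guarantees the file handle is released if println throws, which matters given the RESOURCE_LEAK findings attached to these rows.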
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); if (this.role.equals(XLearningConstants.WORKER)) { this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); } int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 399 #vulnerability type RESOURCE_LEAK
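Editor's note: this row is annotated "#location 399 #vulnerability type RESOURCE_LEAK". Location 399 refers to a line of the original (unflattened) method, so it is hard to say from this dump exactly which resource the analyzer flagged; the method opens writers, sockets, and several process stream readers. The sketch below shows the general remediation pattern for this class of finding, using one of those stream readers as the example. It is illustrative only, not the project's actual patch.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

// Generic RESOURCE_LEAK remediation sketch: try-with-resources guarantees
// close() on every exit path, including when readLine() throws, which a
// trailing "reader.close()" inside a plain try block does not.
public class DrainProcessOutput {
  static void drain(Process process) throws IOException {
    try (BufferedReader reader =
             new BufferedReader(new InputStreamReader(process.getInputStream()))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line); // the real code forwards this to LOG.info / LOG.debug
      }
    } // reader and the underlying stream are closed here regardless of exceptions
  }
}
```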
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } String[] env = null; if ("TENSORFLOW".equals(xlearningAppType)) { if (single) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { /** * set TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ env = new String[]{ "PATH=" + 
System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef, XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else if (xlearningAppType.equals("MXNET")) { if (singleMx) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTXGBOOST")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"), "DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "PYTHONUNBUFFERED=1", "DMLC_TASK_ID=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTLIGHTGBM")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()), "LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort, "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + 
System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"), "DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"), "PYTHONUNBUFFERED=1", dmlcID + "=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); } catch (EOFException e) { finished = true; } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); } osw.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); 
final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start tensorboard process if ("TENSORFLOW".equals(xlearningAppType)) { int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if 
(boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing tensorborad command:" + boardCommand); boardReservedSocket.close(); final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of tensorboard process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of tensorboard process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } String[] env = null; if ("TENSORFLOW".equals(xlearningAppType)) { if (single) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { /** * set TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef, XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else if (xlearningAppType.equals("MXNET")) { if (singleMx) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), 
"PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTXGBOOST")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"), "DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "PYTHONUNBUFFERED=1", "DMLC_TASK_ID=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"), "DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"), "PYTHONUNBUFFERED=1", dmlcID + "=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = 
ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); } catch (EOFException e) { finished = true; } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); } osw.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new 
InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start tensorboard process if ("TENSORFLOW".equals(xlearningAppType)) { int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing tensorborad command:" + boardCommand); boardReservedSocket.close(); final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of tensorboard process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of tensorboard process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); 
boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 186 #vulnerability type RESOURCE_LEAK
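For context on the RESOURCE_LEAK label attached to the row above: such static-analysis findings flag a `Closeable` that is not closed on every execution path, and in `run()` the candidates are objects like `PrintWriter`, `OutputStreamWriter`, `BufferedReader`, and the reserved `Socket`s. The sketch below is purely illustrative and not part of the dataset; the class name and file name are invented. It contrasts the leak-prone shape with a try-with-resources variant.

```java
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;

public class ResourceLeakSketch {

  // Leak-prone shape: if println() or anything before close() throws,
  // the writer is never closed and the file handle leaks.
  static void writeEndpointsLeaky(Map<String, String> endpoints) throws IOException {
    PrintWriter writer = new PrintWriter("endpoints.txt", "UTF-8"); // invented file name
    for (String key : endpoints.keySet()) {
      writer.println(endpoints.get(key));
    }
    writer.close(); // skipped whenever an exception is thrown above
  }

  // Leak-free shape: try-with-resources closes the writer on all paths.
  static void writeEndpointsSafe(Map<String, String> endpoints) throws IOException {
    try (PrintWriter writer = new PrintWriter("endpoints.txt", "UTF-8")) {
      for (String key : endpoints.keySet()) {
        writer.println(endpoints.get(key));
      }
    }
  }
}
```

try-with-resources (Java 7+) guarantees `close()` on both normal and exceptional exit, which is the usual way this class of finding is resolved; whether the fixed revision in these rows closes every flagged resource is something the paired code itself has to show.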
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if (xlearningAppType.equals("LIGHTLDA")) { if (this.role.equals(XLearningConstants.PS)) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightLDALocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort; this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort; LOG.info("lightLDA ip port string is: " + ipPortStr); amClient.reportLightLDAIpPort(containerId, ipPortStr); } if (this.role.equals(XLearningConstants.WORKER)) { String lightLDAIpPortStr; while (true) { lightLDAIpPortStr = amClient.getLightLDAIpPortStr(); if (lightLDAIpPortStr != null) { LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type); PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); 
break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!single) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } else if (xlearningAppType.equals("LIGHTLDA")) { envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString())); envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString())); envList.add("LIGHTLDA_RANK=" + this.index); envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint); envList.add("LIGHTLDA_ROLE=" + this.role); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if 
(envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local 
cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != 
null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { 
LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 196 #vulnerability type RESOURCE_LEAK
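The row above is flagged at a different #location but with the same RESOURCE_LEAK type. Another recurring shape in `run()` is reserving a free port by binding an unconnected `Socket` and closing it just before launching a child process; that reservation socket also has to be released on error paths. Below is a minimal hedged sketch of the idea; the helper name and the try/finally structure are assumptions of this sketch, not taken from the dataset.

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class PortReservationSketch {

  // Bind an unconnected Socket to port 0 so the OS picks a free port,
  // record it, and release the socket on success and failure alike.
  static int reserveLocalPort() throws IOException {
    Socket reserved = new Socket();
    try {
      reserved.bind(new InetSocketAddress("127.0.0.1", 0));
      return reserved.getLocalPort();
    } finally {
      reserved.close();
    }
  }

  public static void main(String[] args) throws IOException {
    int port = reserveLocalPort();
    System.out.println("reserved port " + port); // e.g. handed to a board/worker command line
  }
}
```

Note that the sketch keeps the same inherent race as the original pattern: the reserved port can in principle be claimed by another process between `close()` and the moment the launched command binds it.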
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); if (this.role.equals(XLearningConstants.WORKER)) { this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); } int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 403 #vulnerability type RESOURCE_LEAK
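For this pair, the visible difference between the #fixed code and the #vulnerable code is the role guard around the container reporter startup: the vulnerable run() only resolves the child process id and starts the ContainerReporter when this.role equals XLearningConstants.WORKER, while the fixed run() does so for every role. The finding is reported as a RESOURCE_LEAK at source line 403, and the exact resource flagged there cannot be read off this flattened dump, so the excerpt below is only a simplified sketch of the guard removal, reusing the names that appear in the code above and omitting the LOG calls and surrounding logic.

    // Vulnerable: the reporter thread that tracks the launched process is
    // created only for WORKER containers; other roles never start it.
    if (this.role.equals(XLearningConstants.WORKER)) {
      this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
      containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
      containerReporter.setDaemon(true);
      containerReporter.start();
    }

    // Fixed: the guard is dropped, so every role resolves the process id
    // and starts the daemon reporter thread unconditionally.
    this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
    containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
    containerReporter.setDaemon(true);
    containerReporter.start();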
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); if (this.role.equals(XLearningConstants.WORKER)) { this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); } int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 392 #vulnerability type RESOURCE_LEAK
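This pair shows the same guard removal as the previous one, with the RESOURCE_LEAK reported at source line 392 rather than 403. As a general illustration only (this is not the change applied in the #fixed code above, and the resource actually flagged at line 392 is not identifiable from this dump), findings of this type on short-lived handles such as the reserved board socket are commonly addressed with try-with-resources, so the handle is released on every exit path. The fragment below reuses boardReservedSocket, reportFailedAndExit() and LOG from the method above; the early return in the catch block is a hypothetical addition.

    // Illustrative sketch, not the applied patch: bind the temporary socket
    // inside try-with-resources so it is closed even when bind() throws.
    int boardPort;
    try (Socket boardReservedSocket = new Socket()) {
      boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
      boardPort = boardReservedSocket.getLocalPort();
    } catch (IOException e) {
      LOG.error("Can not get available port");
      reportFailedAndExit();
      return false; // hypothetical early exit if reportFailedAndExit() returns
    }
    // boardPort would then be used to build the board command as above.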
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } List<String> envList = new ArrayList<>(20); envList.add("PATH=" + System.getenv("PATH")); envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME")); envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME")); envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME")); envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native"); envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path")); envList.add("PYTHONUNBUFFERED=1"); envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if ("TENSORFLOW".equals(xlearningAppType)) { envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index); envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role); if (!single) { /** * set 
TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef); } } else if (xlearningAppType.equals("MXNET")) { if (!singleMx) { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI")); envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER")); envList.add(dmlcID + "=" + this.index); envList.add("DMLC_ROLE=" + this.role); } } else if (xlearningAppType.equals("DISTXGBOOST")) { envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI")); envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT")); envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER")); envList.add("DMLC_TASK_ID=" + this.index); envList.add("DMLC_ROLE=" + this.role); } else if (xlearningAppType.equals("DISTLIGHTGBM")) { envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString())); envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort); } if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) { envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList); if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) { LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)); envList.remove(envList.size() - 1); LOG.warn("InputFile list had written to local file: inputFileList.txt !!"); PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8"); writer.println(this.inputFileList); writer.close(); } } String[] env = envList.toArray(new String[envList.size()]); String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = 
Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if (j == 0 && isCache) { if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if (isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new 
SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, 
XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); if (this.role.equals(XLearningConstants.WORKER)) { this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); } int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) { if (code == -1 || code == 0) { this.uploadOutputFiles(); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; } #location 403 #vulnerability type RESOURCE_LEAK
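The row above is labeled RESOURCE_LEAK: streams and writers opened inside run() (for example the PrintWriter for inputFileList.txt and the GZIPOutputStream backing the input-format cache) are closed only on the normal path, so any exception raised before the explicit close() call leaks the handle. The snippet below is a minimal, hypothetical sketch of the conventional remediation for this class of finding rather than the patch recorded in the fixed code above; the class name, method names, and file names are illustrative placeholders, and the point is simply that try-with-resources releases the handles on every exit path.

import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class ResourceLeakSketch {

  // Writes the input-file list; the writer is closed even if println() throws.
  static void writeInputFileList(String inputFileList) throws Exception {
    try (PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8")) {
      writer.println(inputFileList);
    }
  }

  // Writes a gzip'd cache file; the stream (and its gzip trailer) is flushed
  // and closed on every exit path, including exceptions.
  static void writeGzipCache(String cacheFile, Iterable<String> values) throws Exception {
    try (GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(cacheFile))) {
      for (String value : values) {
        gos.write(value.getBytes(StandardCharsets.UTF_8));
        gos.write('\n');
      }
    }
  }
}

Which statement the #location index points at depends on the row's own line numbering, but the remediation shape is the same: scope each handle to a try-with-resources block or close it in a finally block.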
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } String[] env = null; if ("TENSORFLOW".equals(xlearningAppType)) { if (single) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { /** * set TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ env = new String[]{ "PATH=" + 
System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef, XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else if (xlearningAppType.equals("MXNET")) { if (singleMx) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTXGBOOST")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"), "DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "PYTHONUNBUFFERED=1", "DMLC_TASK_ID=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTLIGHTGBM")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()), "LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort, "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + 
System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"), "DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"), "PYTHONUNBUFFERED=1", dmlcID + "=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME)); GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile)); boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE); List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); if(j == 0 && isCache) { if(conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) { gos.write(value.toString().getBytes()); gos.write("\n".getBytes()); if((gzFile.length() / 1024 / 
1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) { LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M " + "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M."); gzFile.delete(); LOG.info("Local cache file deleted and will not use cache."); isCache = false; } } } } catch (EOFException e) { finished = true; e.printStackTrace(); } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); if(isCache) { break; } } osw.close(); gos.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); final Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); JobConf jobConf = new JobConf(conf); jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(Text.class); jobConf.setBoolean("mapred.output.compress", true); jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); jobConf.setOutputFormat(TextMultiOutputFormat.class); Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString()); FileSystem dfs = remotePath.getFileSystem(jobConf); jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString()); OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class), jobConf); outputFormat.checkOutputSpecs(dfs, jobConf); JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0); TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0); jobConf.set("mapred.tip.id", taId.getTaskID().toString()); jobConf.set("mapred.task.id", taId.toString()); jobConf.set("mapred.job.id", jobID.toString()); amClient.reportMapedTaskID(containerId, taId.toString()); RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL); String xlearningStreamResultLine; while ((xlearningStreamResultLine = reader.readLine()) != null) { writer.write(null, xlearningStreamResultLine); } writer.close(Reporter.NULL); reader.close(); dfs.close(); } catch (Exception e) { LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } else { LOG.info("Starting thread to redirect stdout of xlearning process"); Thread stdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream())); String xlearningStdoutLog; while ((xlearningStdoutLog = reader.readLine()) != null) { LOG.info(xlearningStdoutLog); } } catch (Exception e) { 
LOG.warn("Exception in thread stdoutRedirectThread"); e.printStackTrace(); } } }); stdoutRedirectThread.start(); } LOG.info("Starting thread to redirect stderr of xlearning process"); Thread stderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream())); String xlearningStderrLog; while ((xlearningStderrLog = reader.readLine()) != null) { if (xlearningStderrLog.contains("reporter progress")) { heartbeatThread.setProgressLog(xlearningStderrLog); } else { LOG.info(xlearningStderrLog); } } } catch (Exception e) { LOG.warn("Error in thread stderrRedirectThread"); e.printStackTrace(); } } }); stderrRedirectThread.start(); heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING); //Start board process int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX); Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE); if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) { Socket boardReservedSocket = new Socket(); try { boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0)); } catch (IOException e) { LOG.error("Can not get available port"); reportFailedAndExit(); } String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString()); String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR); int boardPort = boardReservedSocket.getLocalPort(); String boardCommand; if ("TENSORFLOW".equals(xlearningAppType)) { int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir; } else { int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT); boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout; String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB); if (!(modelpb.equals("") || modelpb == null)) { boardCommand = boardCommand + " --model_pb=" + modelpb; } } String boardUrl = "http://" + boardHost + ":" + boardPort; LOG.info("Executing board command:" + boardCommand); boardReservedSocket.close(); try { final Process boardProcess = rt.exec(boardCommand, env); LOG.info("Starting thread to redirect stdout of board process"); Thread boardStdoutRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream())); String boardStdoutLog; while ((boardStdoutLog = reader.readLine()) != null) { LOG.debug(boardStdoutLog); } } catch (Exception e) { LOG.warn("Exception in thread 
boardStdoutRedirectThread"); e.printStackTrace(); } } }); boardStdoutRedirectThread.start(); LOG.info("Starting thread to redirect stderr of board process"); Thread boardStderrRedirectThread = new Thread(new Runnable() { @Override public void run() { try { BufferedReader reader; reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream())); String boardStderrLog; while ((boardStderrLog = reader.readLine()) != null) { LOG.debug(boardStderrLog); } } catch (Exception e) { LOG.warn("Error in thread boardStderrRedirectThread"); e.printStackTrace(); } } }); boardStderrRedirectThread.start(); amClient.reportTensorBoardURL(boardUrl); LOG.info("Container index is " + index + ", report board url:" + boardUrl); } catch (Exception e) { LOG.error("Board Process failed. For more detail: " + e); } } int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL); if (this.role.equals(XLearningConstants.WORKER)) { this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess); LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId); containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId); containerReporter.setDaemon(true); containerReporter.start(); } int code = -1; while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) { Utilities.sleep(updateAppStatusInterval); try { code = xlearningProcess.exitValue(); } catch (IllegalThreadStateException e) { LOG.debug("XLearning Process is running"); } } if (this.role.equals(XLearningConstants.PS)) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } if (this.role.equals("server")) { if (code == -1) { xlearningProcess.destroy(); return true; } else if (code == 0) { return true; } return false; } //As role is worker if (code == 0) { this.uploadOutputFiles(); } else { return false; } return true; }
#vulnerable code private Boolean run() throws IOException { try { if (this.role.equals(XLearningConstants.WORKER)) { prepareInputFiles(); } if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) { createLocalOutputDir(); } } catch (InterruptedException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } catch (ExecutionException e) { LOG.error("Container prepare inputs failed!", e); this.reportFailedAndExit(); } if ("TENSORFLOW".equals(xlearningAppType) && !single) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()), reservedSocket.getLocalPort(), this.role, this.index); while (true) { //TODO may be need encode use Base64 while used in Env this.clusterDef = amClient.getClusterDef(); if (this.clusterDef != null) { LOG.info("Cluster def is: " + this.clusterDef); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } } if (xlearningAppType.equals("DISTLIGHTGBM")) { LOG.info("Reserved available port: " + reservedSocket.getLocalPort()); this.lightGBMLocalPort = reservedSocket.getLocalPort(); InetAddress address = null; try { address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString())); } catch (UnknownHostException e) { LOG.info("acquire host ip failed " + e); reportFailedAndExit(); } String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort(); LOG.info("lightGBM ip port string is: " + ipPortStr); amClient.reportLightGbmIpPort(containerId, ipPortStr); String lightGBMIpPortStr; while (true) { //TODO may be need encode use Base64 while used in Env lightGBMIpPortStr = amClient.getLightGbmIpPortStr(); if (lightGBMIpPortStr != null) { LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr); break; } Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL)); } Type type = new TypeToken<ConcurrentHashMap<String, String>>() { }.getType(); ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type); PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8"); for (String str : map.keySet()) { writer.println(map.get(str)); } writer.close(); } String[] env = null; if ("TENSORFLOW".equals(xlearningAppType)) { if (single) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { /** * set TF_CLUSTER_DEF in env * python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])" */ env = new String[]{ 
"PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef, XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index, XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else if (xlearningAppType.equals("MXNET")) { if (singleMx) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTXGBOOST")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"), "DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "PYTHONUNBUFFERED=1", "DMLC_TASK_ID=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else if (xlearningAppType.equals("DISTLIGHTGBM")) { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()), "LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort, "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } else { String dmlcID; if (this.role.equals("worker")) { dmlcID = "DMLC_WORKER_ID"; } else { dmlcID = "DMLC_SERVER_ID"; } env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + 
System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"), "DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"), "DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"), "DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"), "PYTHONUNBUFFERED=1", dmlcID + "=" + this.index, "DMLC_ROLE=" + this.role, XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } } else { env = new String[]{ "PATH=" + System.getenv("PATH"), "JAVA_HOME=" + System.getenv("JAVA_HOME"), "HADOOP_HOME=" + System.getenv("HADOOP_HOME"), "HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"), "LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") + "/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native", "CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"), "PYTHONUNBUFFERED=1", XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList }; } String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString()); LOG.info("Executing command:" + command); Runtime rt = Runtime.getRuntime(); //close reserved socket as tf will bind this port later this.reservedSocket.close(); final Process xlearningProcess = rt.exec(command, env); Date now = new Date(); heartbeatThread.setContainersStartTime(now.toString()); if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) { LOG.info("Starting thread to redirect stdin of xlearning process"); Thread stdinRedirectThread = new Thread(new Runnable() { @Override public void run() { try { OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream()); List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId)); JobConf jobConf = new JobConf(conf); RecordReader reader; InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class), jobConf); for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) { LOG.info("Epoch " + (j + 1) + " starting..."); for (int i = 0, len = inputs.size(); i < len; i++) { LOG.info("split " + (i + 1) + " is handling..."); reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL); Object key = reader.createKey(); Object value = reader.createValue(); Boolean finished = false; while (!finished) { try { finished = !reader.next(key, value); if (finished) { break; } osw.write(value.toString()); osw.write("\n"); } catch (EOFException e) { finished = true; } } reader.close(); LOG.info("split " + (i + 1) + " is finished."); } LOG.info("Epoch " + (j + 1) + " finished."); } osw.close(); } catch (Exception e) { LOG.warn("Exception in thread stdinRedirectThread"); e.printStackTrace(); } } }); stdinRedirectThread.start(); } List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation()); if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) { LOG.info("Starting thread to redirect stream stdout of xlearning process"); 
            final Thread stdoutRedirectThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        BufferedReader reader;
                        reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
                        List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
                        JobConf jobConf = new JobConf(conf);
                        jobConf.setOutputKeyClass(Text.class);
                        jobConf.setOutputValueClass(Text.class);
                        jobConf.setBoolean("mapred.output.compress", true);
                        jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
                        jobConf.setOutputFormat(TextMultiOutputFormat.class);
                        Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
                        FileSystem dfs = remotePath.getFileSystem(jobConf);
                        jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
                        OutputFormat outputFormat = ReflectionUtils.newInstance(
                                conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS,
                                        XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
                                jobConf);
                        outputFormat.checkOutputSpecs(dfs, jobConf);
                        JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
                        TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
                        jobConf.set("mapred.tip.id", taId.getTaskID().toString());
                        jobConf.set("mapred.task.id", taId.toString());
                        jobConf.set("mapred.job.id", jobID.toString());
                        amClient.reportMapedTaskID(containerId, taId.toString());
                        RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
                        String xlearningStreamResultLine;
                        while ((xlearningStreamResultLine = reader.readLine()) != null) {
                            writer.write(null, xlearningStreamResultLine);
                        }
                        writer.close(Reporter.NULL);
                        reader.close();
                        dfs.close();
                    } catch (Exception e) {
                        LOG.warn("Exception in thread stdoutRedirectThread");
                        e.printStackTrace();
                    }
                }
            });
            stdoutRedirectThread.start();
        } else {
            LOG.info("Starting thread to redirect stdout of xlearning process");
            Thread stdoutRedirectThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        BufferedReader reader;
                        reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
                        String xlearningStdoutLog;
                        while ((xlearningStdoutLog = reader.readLine()) != null) {
                            LOG.info(xlearningStdoutLog);
                        }
                    } catch (Exception e) {
                        LOG.warn("Exception in thread stdoutRedirectThread");
                        e.printStackTrace();
                    }
                }
            });
            stdoutRedirectThread.start();
        }

        LOG.info("Starting thread to redirect stderr of xlearning process");
        Thread stderrRedirectThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    BufferedReader reader;
                    reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
                    String xlearningStderrLog;
                    while ((xlearningStderrLog = reader.readLine()) != null) {
                        if (xlearningStderrLog.contains("reporter progress")) {
                            heartbeatThread.setProgressLog(xlearningStderrLog);
                        } else {
                            LOG.info(xlearningStderrLog);
                        }
                    }
                } catch (Exception e) {
                    LOG.warn("Error in thread stderrRedirectThread");
                    e.printStackTrace();
                }
            }
        });
        stderrRedirectThread.start();

        heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);

        //Start board process
        int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX,
                XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
        Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE,
                XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
        if (boardEnable && this.role.equals(XLearningConstants.WORKER) &&
                boardIndex == this.index) {
            Socket boardReservedSocket = new Socket();
            try {
                boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
            } catch (IOException e) {
                LOG.error("Can not get available port");
                reportFailedAndExit();
            }
            String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
            String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR,
                    XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
            int boardPort = boardReservedSocket.getLocalPort();
            String boardCommand;
            if ("TENSORFLOW".equals(xlearningAppType)) {
                int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL,
                        XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
                boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH,
                        XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH)
                        + " --host=" + boardHost + " --port=" + boardPort
                        + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
            } else {
                int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT,
                        XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
                boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH,
                        XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH)
                        + " --host=" + boardHost + " --port=" + boardPort
                        + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
                String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB,
                        XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
                if (!(modelpb.equals("") || modelpb == null)) {
                    boardCommand = boardCommand + " --model_pb=" + modelpb;
                }
            }
            String boardUrl = "http://" + boardHost + ":" + boardPort;
            LOG.info("Executing borad command:" + boardCommand);
            boardReservedSocket.close();
            try {
                final Process boardProcess = rt.exec(boardCommand, env);
                LOG.info("Starting thread to redirect stdout of board process");
                Thread boardStdoutRedirectThread = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            BufferedReader reader;
                            reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
                            String boardStdoutLog;
                            while ((boardStdoutLog = reader.readLine()) != null) {
                                LOG.debug(boardStdoutLog);
                            }
                        } catch (Exception e) {
                            LOG.warn("Exception in thread boardStdoutRedirectThread");
                            e.printStackTrace();
                        }
                    }
                });
                boardStdoutRedirectThread.start();
                LOG.info("Starting thread to redirect stderr of board process");
                Thread boardStderrRedirectThread = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            BufferedReader reader;
                            reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
                            String boardStderrLog;
                            while ((boardStderrLog = reader.readLine()) != null) {
                                LOG.debug(boardStderrLog);
                            }
                        } catch (Exception e) {
                            LOG.warn("Error in thread boardStderrRedirectThread");
                            e.printStackTrace();
                        }
                    }
                });
                boardStderrRedirectThread.start();
                amClient.reportTensorBoardURL(boardUrl);
                LOG.info("Container index is " + index + ", report board url:" + boardUrl);
            } catch (Exception e) {
                LOG.error("Board Process failed. For more detail: " + e);
            }
        }

        int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL,
                XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
        if (this.role.equals(XLearningConstants.WORKER)) {
            this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
            LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
            containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
            containerReporter.setDaemon(true);
            containerReporter.start();
        }

        int code = -1;
        while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
            Utilities.sleep(updateAppStatusInterval);
            try {
                code = xlearningProcess.exitValue();
            } catch (IllegalThreadStateException e) {
                LOG.debug("XLearning Process is running");
            }
        }

        if (this.role.equals(XLearningConstants.PS)) {
            if (code == -1) {
                xlearningProcess.destroy();
                return true;
            } else if (code == 0) {
                return true;
            }
            return false;
        }

        if (this.role.equals("server")) {
            if (code == -1) {
                xlearningProcess.destroy();
                return true;
            } else if (code == 0) {
                return true;
            }
            return false;
        }

        //As role is worker
        if (code == 0) {
            this.uploadOutputFiles();
        } else {
            return false;
        }
        return true;
    }

#location 234
#vulnerability type RESOURCE_LEAK
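The record above flags a resource leak in the container launcher: the redirect threads build BufferedReaders that are only closed on the happy path. As a generic illustration (these are not the XLearning classes themselves), a stream-gobbler thread can lean on try-with-resources so the reader and its underlying stream are released even when reading fails:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class StreamGobbler implements Runnable {
        private final Process process;

        public StreamGobbler(Process process) {
            this.process = process;
        }

        @Override
        public void run() {
            // try-with-resources guarantees the reader (and the underlying
            // stream) is closed even if readLine() throws.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }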
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private void writeToFile(String message, String path) throws IOException {
    if (StringUtils.isEmpty(message) || StringUtils.isEmpty(path)) {
        return;
    }
    PrintWriter out = null;
    try {
        out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)));
        out.println(message);
        out.flush();
    } finally {
        if (null != out) {
            out.close();
        }
    }
}

#vulnerable code
private void writeToFile(String message, String path) throws IOException {
    if (StringUtils.isEmpty(message) || StringUtils.isEmpty(path)) {
        return;
    }
    RandomAccessFile rf = new RandomAccessFile(path, "rw");
    rf.seek(rf.length());
    rf.write(message.getBytes());
    rf.close();
}

#location 4
#vulnerability type RESOURCE_LEAK
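The patched version closes the writer in an explicit finally block; on Java 7+ the try-with-resources form below is an equivalent, shorter sketch (class and method names here are illustrative, not taken from the record above):

    import java.io.BufferedWriter;
    import java.io.FileWriter;
    import java.io.IOException;
    import java.io.PrintWriter;

    public final class AppendUtil {
        private AppendUtil() {}

        // Appends one line to the file; the writer chain is closed automatically,
        // even when println() or the implicit flush throws.
        public static void appendLine(String message, String path) throws IOException {
            try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)))) {
                out.println(message);
            }
        }
    }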
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public static Configure toConfigure(String toString) {
    final Configure configure = new Configure();
    final String[] pvs = split(toString, ";");
    for (String pv : pvs) {
        try {
            final String[] stringSplitArray = split(pv, "=");
            final String p = stringSplitArray[0];
            final String v = decode(stringSplitArray[1]);
            final Field field = getField(Configure.class, p);
            if (null != field) {
                set(field, valueOf(field.getType(), v), configure);
            }
        } catch (Throwable t) {
            //
        }
    }
    return configure;
}

#vulnerable code
public static Configure toConfigure(String toString) {
    final Configure configure = new Configure();
    final String[] pvs = split(toString, ";");
    for (String pv : pvs) {
        try {
            final String[] stringSplitArray = split(pv, "=");
            final String p = stringSplitArray[0];
            final String v = decode(stringSplitArray[1]);
            final Field field = getField(Configure.class, p);
            set(field, valueOf(field.getType(), v), configure);
        } catch (Throwable t) {
            //
        }
    }
    return configure;
}

#location 10
#vulnerability type NULL_DEREFERENCE
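The only functional change in this patch is the null guard around the reflective field lookup. A small, self-contained sketch of the same pattern (the class and method names are hypothetical, not the project's API):

    import java.lang.reflect.Field;

    public final class ReflectionSafety {
        private ReflectionSafety() {}

        // Returns the declared field or null instead of throwing, so callers
        // must check the result before dereferencing it.
        public static Field findField(Class<?> clazz, String name) {
            try {
                return clazz.getDeclaredField(name);
            } catch (NoSuchFieldException e) {
                return null;
            }
        }

        public static void setIfPresent(Object target, String name, Object value)
                throws IllegalAccessException {
            Field field = findField(target.getClass(), name);
            if (field != null) {          // the guard the patched toConfigure() adds
                field.setAccessible(true);
                field.set(target, value);
            }
        }
    }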
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public byte[] transform(
        ClassLoader loader,
        String className,
        Class<?> classBeingRedefined,
        ProtectionDomain protectionDomain,
        byte[] classfileBuffer) throws IllegalClassFormatException {

    // Filter once more here. Why? Because new classes may still be created
    // during the transform, so the set of classes to transform is passed down
    // and checked again.
    if (!matchingClasses.contains(classBeingRedefined)) {
        return null;
    }

    final ClassReader cr;

    // First check whether the class bytecode already exists in the cache,
    // because multi-user collaboration is supported and several users may
    // enhance the same class at the same time.
    final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
    if (null != byteOfClassInCache) {
        cr = new ClassReader(byteOfClassInCache);
    }
    // On a cache miss, start enhancing from the original bytecode.
    else {
        cr = new ClassReader(classfileBuffer);
    }

    // Bytecode enhancement
    final ClassWriter cw = new ClassWriter(cr, /*COMPUTE_FRAMES |*/ COMPUTE_MAXS);

    try {
        // Generate the enhanced bytecode
        cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
        final byte[] enhanceClassByteArray = cw.toByteArray();

        // Generation succeeded; push it into the cache
        classBytesCache.put(classBeingRedefined, enhanceClassByteArray);

        // Count the success
        affect.cCnt(1);

        // // dump
        // final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
        // os.write(enhanceClassByteArray);
        // os.flush();
        // os.close();

        return enhanceClassByteArray;
    } catch (Throwable t) {
        if (logger.isLoggable(WARNING)) {
            logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
        }
    }

    return null;
}

#vulnerable code
public byte[] transform(
        ClassLoader loader,
        String className,
        Class<?> classBeingRedefined,
        ProtectionDomain protectionDomain,
        byte[] classfileBuffer) throws IllegalClassFormatException {

    // Filter once more here. Why? Because new classes may still be created
    // during the transform, so the set of classes to transform is passed down
    // and checked again.
    if (!matchingClasses.contains(classBeingRedefined)) {
        return null;
    }

    final ClassReader cr;

    // First check whether the class bytecode already exists in the cache,
    // because multi-user collaboration is supported and several users may
    // enhance the same class at the same time.
    final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
    if (null != byteOfClassInCache) {
        cr = new ClassReader(byteOfClassInCache);
    }
    // On a cache miss, start enhancing from the original bytecode.
    else {
        cr = new ClassReader(classfileBuffer);
    }

    // Bytecode enhancement
    final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);

    try {
        // Generate the enhanced bytecode
        cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
        final byte[] enhanceClassByteArray = cw.toByteArray();

        // Generation succeeded; push it into the cache
        classBytesCache.put(classBeingRedefined, enhanceClassByteArray);

        // Count the success
        affect.cCnt(1);

        // dump
        final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
        os.write(enhanceClassByteArray);
        os.flush();
        os.close();

        return enhanceClassByteArray;
    } catch (Throwable t) {
        if (logger.isLoggable(WARNING)) {
            logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
        }
    }

    return null;
}

#location 52
#vulnerability type RESOURCE_LEAK
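The fix simply comments the debug dump out; if a dump were still wanted, java.nio.file.Files can write the bytes without leaving a FileOutputStream open on failure. A small illustrative sketch (ClassDumper and its method are hypothetical):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public final class ClassDumper {
        private ClassDumper() {}

        // Files.write opens and closes the stream internally, so no
        // FileOutputStream is left open if the write fails halfway through.
        public static void dump(String internalClassName, byte[] bytecode, String dir) throws IOException {
            Path target = Paths.get(dir, internalClassName.replace('/', '.') + ".class");
            Files.createDirectories(target.getParent());
            Files.write(target, bytecode);
        }
    }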
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public byte[] transform(
        ClassLoader loader,
        String className,
        Class<?> classBeingRedefined,
        ProtectionDomain protectionDomain,
        byte[] classfileBuffer) throws IllegalClassFormatException {

    // Filter once more here. Why? Because new classes may still be created
    // during the transform, so the set of classes to transform is passed down
    // and checked again.
    if (!matchingClasses.contains(classBeingRedefined)) {
        return null;
    }

    final ClassReader cr;

    // First check whether the class bytecode already exists in the cache,
    // because multi-user collaboration is supported and several users may
    // enhance the same class at the same time.
    final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
    if (null != byteOfClassInCache) {
        cr = new ClassReader(byteOfClassInCache);
    }
    // On a cache miss, start enhancing from the original bytecode.
    else {
        cr = new ClassReader(classfileBuffer);
    }

    // Bytecode enhancement
    final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);

    try {
        // Generate the enhanced bytecode
        cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
        final byte[] enhanceClassByteArray = cw.toByteArray();

        // Generation succeeded; push it into the cache
        classBytesCache.put(classBeingRedefined, enhanceClassByteArray);

        // Count the success
        affect.cCnt(1);

        // // dump
        // final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
        // os.write(enhanceClassByteArray);
        // os.flush();
        // os.close();

        return enhanceClassByteArray;
    } catch (Throwable t) {
        if (logger.isLoggable(WARNING)) {
            logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
        }
    }

    return null;
}

#vulnerable code
public byte[] transform(
        ClassLoader loader,
        String className,
        Class<?> classBeingRedefined,
        ProtectionDomain protectionDomain,
        byte[] classfileBuffer) throws IllegalClassFormatException {

    // Filter once more here. Why? Because new classes may still be created
    // during the transform, so the set of classes to transform is passed down
    // and checked again.
    if (!matchingClasses.contains(classBeingRedefined)) {
        return null;
    }

    final ClassReader cr;

    // First check whether the class bytecode already exists in the cache,
    // because multi-user collaboration is supported and several users may
    // enhance the same class at the same time.
    final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
    if (null != byteOfClassInCache) {
        cr = new ClassReader(byteOfClassInCache);
    }
    // On a cache miss, start enhancing from the original bytecode.
    else {
        cr = new ClassReader(classfileBuffer);
    }

    // Bytecode enhancement
    final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);

    try {
        // Generate the enhanced bytecode
        cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
        final byte[] enhanceClassByteArray = cw.toByteArray();

        // Generation succeeded; push it into the cache
        classBytesCache.put(classBeingRedefined, enhanceClassByteArray);

        // Count the success
        affect.cCnt(1);

        // dump
        final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
        os.write(enhanceClassByteArray);
        os.flush();;
        os.close();

        return enhanceClassByteArray;
    } catch (Throwable t) {
        if (logger.isLoggable(WARNING)) {
            logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
        }
    }

    return null;
}

#location 52
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void validate() {
    Map<String, Binding<?>> allBindings = linkEverything();
    new ProblemDetector().detectProblems(allBindings.values());
}

#vulnerable code
public void validate() {
    Map<String, Binding<?>> allBindings;
    synchronized (linker) {
        linkStaticInjections();
        linkEntryPoints();
        allBindings = linker.linkAll();
    }
    new ProblemDetector().detectProblems(allBindings.values());
}

#location 8
#vulnerability type THREAD_SAFETY_VIOLATION
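The patch pushes all linking behind linkEverything() so the caller no longer mixes locking with validation. The general idea is to copy shared state under one lock and run the slow check on the snapshot; a toy sketch of that pattern (not the DI framework's real types):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class SnapshotValidator {
        private final Object lock = new Object();
        private final List<String> bindings = new ArrayList<>();

        public void add(String binding) {
            synchronized (lock) {
                bindings.add(binding);
            }
        }

        // Copy under the lock, validate outside it: the validation logic never
        // touches shared mutable state, so it cannot race with writers.
        public void validate() {
            List<String> snapshot;
            synchronized (lock) {
                snapshot = Collections.unmodifiableList(new ArrayList<>(bindings));
            }
            for (String binding : snapshot) {
                if (binding.isEmpty()) {
                    throw new IllegalStateException("empty binding");
                }
            }
        }
    }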
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        printUsage();
        return;
    }
    File manifestXml = new File(args[0]);
    String moduleName = args[1];
    File baseDir = new File(args[2]);
    if (!manifestXml.exists()) {
        System.out.println("No such file: " + manifestXml);
        printUsage();
        return;
    }
    if (!baseDir.isDirectory()) {
        System.out.println("No such directory: " + baseDir);
        printUsage();
        return;
    }
    generate(manifestXml, moduleName, baseDir);
}

#vulnerable code
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        printUsage();
        return;
    }
    File manifestXml = new File(args[0]);
    String moduleName = args[1];
    File baseDir = new File(args[2]);
    if (!manifestXml.exists()) {
        System.out.println("No such file: " + manifestXml);
        printUsage();
        return;
    }
    if (!baseDir.isDirectory()) {
        System.out.println("No such directory: " + baseDir);
        printUsage();
        return;
    }
    ModuleGenerator moduleGenerator = new ModuleGenerator();
    InputSource in = new InputSource(new FileInputStream(manifestXml));
    Document document = moduleGenerator.manifestToDocument(in);
    File file = moduleGenerator.path(document, moduleName, baseDir);
    file.getParentFile().mkdirs();
    JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
    moduleGenerator.generate(document, moduleName, out);
    out.close();
}

#location 28
#vulnerability type RESOURCE_LEAK
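The leak comes from a writer that is never closed when an intermediate step throws; the same "write generated source" step can be expressed so the underlying file handle is always released. An illustrative sketch using java.nio.file (SourceEmitter is hypothetical):

    import java.io.IOException;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public final class SourceEmitter {
        private SourceEmitter() {}

        // newBufferedWriter plus try-with-resources closes the file even when
        // writing throws halfway through.
        public static void emit(Path file, String source) throws IOException {
            if (file.getParent() != null) {
                Files.createDirectories(file.getParent());
            }
            try (Writer out = Files.newBufferedWriter(file, StandardCharsets.UTF_8)) {
                out.write(source);
            }
        }
    }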
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public boolean lock(long seckillId) {
    try {
        if (threadLock.get() == null) {
            Map<Long, InterProcessMutex> map = new HashMap();
            map.put(seckillId, new InterProcessMutex(client, ROOT_LOCK_PATH + "/" + String.valueOf(seckillId)));
            threadLock.set(map);
        } else {
            threadLock.get().get(seckillId).acquire(2L, TimeUnit.SECONDS);
        }
        return true;
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        return false;
    }
}

#vulnerable code
public boolean lock(long seckillId) {
    try {
        if (lockMap.get(seckillId) == null) {
            lockMap.put(seckillId, new InterProcessMutex(client, ROOT_LOCK_PATH + "/" + String.valueOf(seckillId)));
        }
        lockMap.get(seckillId).acquire(2L, TimeUnit.SECONDS);
        return true;
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        return false;
    }
}

#location 6
#vulnerability type NULL_DEREFERENCE
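Both versions lazily create a mutex per seckillId; on a shared map the race- and null-safe idiom is ConcurrentHashMap.computeIfAbsent, which atomically creates and returns the entry. A minimal sketch with plain ReentrantLocks standing in for the Curator mutex (LockRegistry is hypothetical):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.locks.ReentrantLock;

    public class LockRegistry {
        private final ConcurrentMap<Long, ReentrantLock> locks = new ConcurrentHashMap<>();

        // computeIfAbsent is atomic, so two threads asking for the same id
        // always receive the same lock instance and neither ever sees null.
        public boolean tryLock(long id) {
            ReentrantLock lock = locks.computeIfAbsent(id, key -> new ReentrantLock());
            return lock.tryLock();
        }

        public void unlock(long id) {
            ReentrantLock lock = locks.get(id);
            if (lock != null && lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }
    }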
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private String uploadGoodsPhoto(CommonsMultipartFile file) throws IOException {
    final String s = "/Users/heng/java学习/";
    String path = s + file.getOriginalFilename();
    FileOutputStream fos = null;
    InputStream is = null;
    try {
        String filePath = s;
        File file_tmp = new File(filePath);
        if (!file_tmp.exists() && !file_tmp.mkdirs()) {
            throw new HengException("dir create error!");
        }
        fos = new FileOutputStream(path);
        is = file.getInputStream();
        int b;
        while ((b = is.read()) != -1) {
            fos.write(b);
        }
        fos.flush();
    } catch (IOException e) {
        logger.error("error message is:", e);
        throw new HengException("上传文件异常");
    } finally {
        if (fos != null) {
            fos.close();
        }
        if (is != null) {
            is.close();
        }
    }
    return path;
}

#vulnerable code
private String uploadGoodsPhoto(CommonsMultipartFile file) {
    final String s = "/Users/heng/java学习/";
    String path = s + file.getOriginalFilename();
    try {
        String filePath = s;
        File file_tmp = new File(filePath);
        if (!file_tmp.exists() && !file_tmp.mkdirs()) {
            throw new HengException("dir create error!");
        }
        FileOutputStream fos = new FileOutputStream(path);
        InputStream is = file.getInputStream();
        int b = 0;
        while ((b = is.read()) != -1) {
            fos.write(b);
        }
        fos.flush();
        fos.close();
        is.close();
    } catch (IOException e) {
        throw new HengException("上传文件异常");
    }
    return path;
}

#location 19
#vulnerability type RESOURCE_LEAK
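The fixed method closes both streams in a finally block; with java.nio.file the byte-by-byte copy loop and the manual close bookkeeping disappear. An illustrative sketch (UploadUtil and its signature are hypothetical):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;

    public final class UploadUtil {
        private UploadUtil() {}

        // Files.copy manages its own output stream; the caller's InputStream is
        // closed by try-with-resources whether or not the copy succeeds.
        public static Path save(InputStream in, String dir, String fileName) throws IOException {
            Path target = Paths.get(dir, fileName);
            Files.createDirectories(target.getParent());
            try (InputStream source = in) {
                Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
            }
            return target;
        }
    }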
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public static void main(String[] args) {
    SpringApplication.run(GoodsKillRpcServiceApplication.class);
}

#vulnerable code
public static void main(String[] args) {
    log.info(">>>>> goodsKill-rpc-service 正在启动 <<<<<");
    AbstractApplicationContext context = new ClassPathXmlApplicationContext(
            "classpath*:META-INF/spring/spring-*.xml");
    // Gracefully shut down before the JVM exits
    context.registerShutdownHook();
    context.start();
    log.info(">>>>> goodsKill-rpc-service 启动完成 <<<<<");
}

#location 7
#vulnerability type RESOURCE_LEAK
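This record's flagged version builds an application context that is never closed explicitly; besides registerShutdownHook(), Spring versions in which ConfigurableApplicationContext implements Closeable allow the context to participate in try-with-resources. A sketch under that assumption (the class name is hypothetical, and a real RPC service would block instead of exiting immediately):

    import org.springframework.context.support.ClassPathXmlApplicationContext;

    public class RpcBootstrap {
        public static void main(String[] args) {
            // Assumes a Spring version where the context is AutoCloseable; the
            // context is then closed (and beans destroyed) even if start() throws.
            try (ClassPathXmlApplicationContext context =
                         new ClassPathXmlApplicationContext("classpath*:META-INF/spring/spring-*.xml")) {
                context.start();
                // ... keep the thread alive here for a long-running service ...
            }
        }
    }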
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public boolean lock(long seckillId) {
    try {
        Map<Long, InterProcessMutex> map;
        String rootLockPath = "/goodskill";
        Map<Long, InterProcessMutex> processMutexMap = threadLock.get();
        if (processMutexMap.get(seckillId) == null) {
            processMutexMap.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
        }
        boolean acquire = processMutexMap.get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
        if (log.isDebugEnabled()) {
            log.debug("成功获取到zk锁,秒杀id{}", seckillId);
        }
        return acquire;
    } catch (Exception e) {
        log.warn("获取zk锁异常:{}", e.getMessage());
        return false;
    }
}

#vulnerable code
public boolean lock(long seckillId) {
    try {
        Map<Long, InterProcessMutex> map;
        String rootLockPath = "/goodskill";
        if (threadLock.get() == null) {
            map = new ConcurrentHashMap();
            map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
            threadLock.set(map);
        } else {
            if (threadLock.get().get(seckillId) == null) {
                map = threadLock.get();
                map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
            }
        }
        boolean acquire = threadLock.get().get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
        if (log.isDebugEnabled()) {
            log.debug("成功获取到zk锁,秒杀id{}", seckillId);
        }
        return acquire;
    } catch (Exception e) {
        log.warn("获取zk锁异常:{}", e.getMessage());
        return false;
    }
}

#location 15
#vulnerability type NULL_DEREFERENCE
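The patched method relies on threadLock.get() already returning a map; ThreadLocal.withInitial is the idiom that guarantees this, because every thread lazily receives its own empty map on first access and get() can never return null. A compact sketch (the class name is hypothetical):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class PerThreadLockCache {
        // withInitial guarantees get() never returns null.
        private static final ThreadLocal<Map<Long, Object>> LOCKS =
                ThreadLocal.withInitial(ConcurrentHashMap::new);

        // Returns this thread's lock object for the given id, creating it on demand.
        public Object lockFor(long id) {
            return LOCKS.get().computeIfAbsent(id, key -> new Object());
        }
    }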
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public static void main(String[] args) throws IOException {
    logger.info(">>>>> goodsKill-rpc-service 正在启动 <<<<<");
    AbstractApplicationContext context = new ClassPathXmlApplicationContext(
            "classpath*:META-INF/spring/spring-*.xml");
    // Gracefully shut down before the JVM exits
    context.registerShutdownHook();
    context.start();
    logger.info(">>>>> goodsKill-rpc-service 启动完成 <<<<<");
}

#vulnerable code
public static void main(String[] args) throws IOException {
    logger.info(">>>>> goodsKill-rpc-service 正在启动 <<<<<");
    ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(
            "classpath*:META-INF/spring/spring-*.xml");
    context.start();
    System.in.read();
    logger.info(">>>>> goodsKill-rpc-service 启动完成 <<<<<");
}

#location 5
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
    new TemplateList(null, (String[]) null);
}

#vulnerable code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
    new PatternList((String[]) null);
}

#location 3
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void atExpr(Expr expr) throws CompileError {
    // array access, member access,
    // (unary) +, (unary) -, ++, --, !, ~

    int token = expr.getOperator();
    ASTree oprand = expr.oprand1();
    if (token == '.') {
        String member = ((Symbol)expr.oprand2()).get();
        if (member.equals("length"))
            atArrayLength(expr);
        else if (member.equals("class"))
            atClassObject(expr);        // .class
        else
            atFieldRead(expr);
    }
    else if (token == MEMBER) {         // field read
        String member = ((Symbol)expr.oprand2()).get();
        if (member.equals("class"))
            atClassObject(expr);        // .class
        else
            atFieldRead(expr);
    }
    else if (token == ARRAY)
        atArrayRead(oprand, expr.oprand2());
    else if (token == PLUSPLUS || token == MINUSMINUS)
        atPlusPlus(token, oprand, expr);
    else if (token == '!')
        booleanExpr(expr);
    else if (token == CALL)             // method call
        fatal();
    else {
        oprand.accept(this);
        if (!isConstant(expr, token, oprand))
            if (token == '-' || token == '~')
                if (CodeGen.isP_INT(exprType))
                    exprType = INT;     // type may be BYTE, ...
    }
}

#vulnerable code
public void atExpr(Expr expr) throws CompileError {
    // array access, member access,
    // (unary) +, (unary) -, ++, --, !, ~

    int token = expr.getOperator();
    ASTree oprand = expr.oprand1();
    if (token == '.') {
        String member = ((Symbol)expr.oprand2()).get();
        if (member.equals("length"))
            atArrayLength(expr);
        else if (member.equals("class"))
            atClassObject(expr);        // .class
        else
            atFieldRead(expr);
    }
    else if (token == MEMBER) {         // field read
        String member = ((Symbol)expr.oprand2()).get();
        if (member.equals("class"))
            atClassObject(expr);        // .class
        else
            atFieldRead(expr);
    }
    else if (token == ARRAY)
        atArrayRead(oprand, expr.oprand2());
    else if (token == PLUSPLUS || token == MINUSMINUS)
        atPlusPlus(token, oprand, expr);
    else if (token == '!')
        booleanExpr(expr);
    else if (token == CALL)             // method call
        fatal();
    else {
        expr.oprand1().accept(this);
        if (token == '-' || token == '~')
            if (CodeGen.isP_INT(exprType))
                exprType = INT;         // type may be BYTE, ...
    }
}

#location 24
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    CtField f = null;
    boolean is_static = false;
    if (expr instanceof Member) {
        String name = ((Member)expr).get();
        try {
            f = thisClass.getField(name);
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }

        is_static = Modifier.isStatic(f.getModifiers());
        if (!is_static)
            if (inStaticMethod)
                throw new CompileError(
                        "not available in a static method: " + name);
            else
                bytecode.addAload(0);       // this
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER) {
            f = lookupJavaField(((Symbol)e.oprand1()).get(), (Symbol)e.oprand2());
            is_static = true;
        }
        else if (op == '.') {
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    f = lookupJvmField(className, (Symbol)e.oprand2());
                else
                    badLvalue();

                is_static = Modifier.isStatic(f.getModifiers());
                if (is_static)
                    bytecode.addOpcode(POP);
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                Symbol fname = (Symbol)e.oprand2();
                // it should be a static field.
                try {
                    f = lookupJvmField(nfe.getField(), fname);
                    is_static = true;
                }
                catch (CompileError ce) {
                    // EXPR might be part of a qualified class name.
                    throw new NoFieldException(nfe.getField() + "/" + fname.get(), expr);
                }
            }
        }
        else
            badLvalue();
    }
    else
        badLvalue();

    resultStatic = is_static;
    return f;
}

#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    CtField f = null;
    boolean is_static = false;
    if (expr instanceof Member) {
        String name = ((Member)expr).get();
        try {
            f = thisClass.getField(name);
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }

        is_static = Modifier.isStatic(f.getModifiers());
        if (!is_static)
            if (inStaticMethod)
                throw new CompileError(
                        "not available in a static method: " + name);
            else
                bytecode.addAload(0);       // this
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER) {
            f = lookupField((ASTList)e.oprand1(), (Symbol)e.oprand2());
            is_static = true;
        }
        else if (op == '.') {
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    f = lookupField(className, (Symbol)e.oprand2());
                else
                    badLvalue();

                is_static = Modifier.isStatic(f.getModifiers());
                if (is_static)
                    bytecode.addOpcode(POP);
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                Symbol fname = (Symbol)e.oprand2();
                // it should be a static field.
                try {
                    f = lookupField(nfe.getField(), fname);
                    is_static = true;
                }
                catch (CompileError ce) {
                    // EXPR might be part of a qualified class name.
                    throw new NoFieldException(nfe.getField() + "/" + fname.get(), expr);
                }
            }
        }
        else
            badLvalue();
    }
    else
        badLvalue();

    resultStatic = is_static;
    return f;
}

#location 33
#vulnerability type NULL_DEREFERENCE
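The patched fieldAccess() works because every branch now assigns f before its modifiers are read. A generic way to make that invariant explicit is to fail fast with a descriptive error instead of letting a null flow into a later dereference; a minimal sketch (the helper class is hypothetical, not Javassist API):

    import java.util.Objects;

    public final class LookupGuards {
        private LookupGuards() {}

        // Wraps any nullable lookup so callers get a clear error at the lookup
        // site rather than a NullPointerException at some later dereference.
        public static <T> T requireFound(T value, String what) {
            return Objects.requireNonNull(value, () -> "lookup failed: " + what);
        }
    }

A caller would write something like `CtField f = LookupGuards.requireFound(lookup(name), name);` and only then read `f.getModifiers()`.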
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public InputStream openClassfile(String classname) {
    try {
        URLConnection con = openClassfile0(classname);
        if (con != null)
            return con.getInputStream();
    }
    catch (IOException e) {}
    return null;        // not found
}

#vulnerable code
public InputStream openClassfile(String classname) {
    try {
        if (packageName == null || classname.startsWith(packageName)) {
            String jarname = directory + classname.replace('.', '/') + ".class";
            URLConnection con = fetchClass0(hostname, port, jarname);
            return con.getInputStream();
        }
    }
    catch (IOException e) {}
    return null;        // not found
}

#location 7
#vulnerability type RESOURCE_LEAK
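The fixed openClassfile() only dereferences the connection its helper actually produced, and hands the raw stream to the caller. A generic sketch of that contract (RemoteClassSource is hypothetical; the caller owns the returned stream and must close it):

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.URL;
    import java.net.URLConnection;

    public final class RemoteClassSource {
        private RemoteClassSource() {}

        // Returns null when the class file cannot be fetched; otherwise the
        // caller receives (and is responsible for closing) the open stream.
        public static InputStream open(String baseUrl, String className) {
            try {
                URL url = new URL(baseUrl + className.replace('.', '/') + ".class");
                URLConnection connection = url.openConnection();
                return connection.getInputStream();
            } catch (IOException e) {
                return null;
            }
        }
    }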
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    if (expr instanceof Member) {
        Member mem = (Member)expr;
        String name = mem.get();
        try {
            CtField f = thisClass.getField(name);
            if (Modifier.isStatic(f.getModifiers()))
                mem.setField(f);

            return f;
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER) {
            Member mem = (Member)e.oprand2();
            CtField f = resolver.lookupField(((Symbol)e.oprand1()).get(), mem);
            mem.setField(f);
            return f;
        }
        else if (op == '.')
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    return resolver.lookupFieldByJvmName(className, (Symbol)e.oprand2());
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                /* EXPR should be a static field.
                 * If EXPR might be part of a qualified class name,
                 * lookupFieldByJvmName2() throws NoFieldException.
                 */
                Member fname = (Member)e.oprand2();
                String jvmClassName = nfe.getField();
                CtField f = resolver.lookupFieldByJvmName2(jvmClassName, fname, expr);
                e.setOperator(MEMBER);
                e.setOprand1(new Symbol(MemberResolver.jvmToJavaName(jvmClassName)));
                fname.setField(f);
                return f;
            }
    }

    throw new CompileError("bad filed access");
}

#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    if (expr instanceof Member) {
        String name = ((Member)expr).get();
        try {
            return thisClass.getField(name);
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER)
            return resolver.lookupField(((Symbol)e.oprand1()).get(), (Symbol)e.oprand2());
        else if (op == '.')
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    return resolver.lookupFieldByJvmName(className, (Symbol)e.oprand2());
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                /* EXPR should be a static field.
                 * If EXPR might be part of a qualified class name,
                 * lookupFieldByJvmName2() throws NoFieldException.
                 */
                Symbol fname = (Symbol)e.oprand2();
                return resolver.lookupFieldByJvmName2(nfe.getField(), fname, expr);
            }
    }

    throw new CompileError("bad filed access");
}

#location 16
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void atBinExpr(BinExpr expr) throws CompileError {
    int token = expr.getOperator();
    int k = CodeGen.lookupBinOp(token);
    if (k >= 0) {
        /* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>> */
        if (token == '+') {
            Expr e = atPlusExpr(expr);
            if (e != null) {
                /* String concatenation has been translated into
                 * an expression using StringBuffer.
                 */
                e = CallExpr.makeCall(Expr.make('.', e, new Member("toString")), null);
                expr.setOprand1(e);
                expr.setOprand2(null);      // <---- look at this!
                className = jvmJavaLangString;
            }
        }
        else {
            ASTree left = expr.oprand1();
            ASTree right = expr.oprand2();
            left.accept(this);
            int type1 = exprType;
            right.accept(this);
            if (!isConstant(expr, token, left, right))
                computeBinExprType(expr, token, type1);
        }
    }
    else {
        /* equation: &&, ||, ==, !=, <=, >=, <, > */
        booleanExpr(expr);
    }
}

#vulnerable code
public void atBinExpr(BinExpr expr) throws CompileError {
    int token = expr.getOperator();
    int k = CodeGen.lookupBinOp(token);
    if (k >= 0) {
        /* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>> */
        if (token == '+') {
            Expr e = atPlusExpr(expr);
            if (e != null) {
                /* String concatenation has been translated into
                 * an expression using StringBuffer.
                 */
                e = CallExpr.makeCall(Expr.make('.', e, new Member("toString")), null);
                expr.setLeft(e);
                expr.setOprand2(null);      // <---- look at this!
                className = jvmJavaLangString;
            }
        }
        else {
            expr.oprand1().accept(this);
            int type1 = exprType;
            expr.oprand2().accept(this);
            computeBinExprType(expr, token, type1);
        }
    }
    else {
        /* equation: &&, ||, ==, !=, <=, >=, <, > */
        booleanExpr(expr);
    }
}

#location 23
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void renameClass(Map classnames) {
    LongVector v = items;
    int size = numOfItems;
    classes = new HashMap(classes.size() * 2);
    for (int i = 1; i < size; ++i) {
        ConstInfo ci = (ConstInfo)v.elementAt(i);
        ci.renameClass(this, classnames);
        ci.makeHashtable(this);
    }
}

#vulnerable code
public void renameClass(Map classnames) {
    LongVector v = items;
    int size = numOfItems;
    for (int i = 1; i < size; ++i)
        ((ConstInfo)v.elementAt(i)).renameClass(this, classnames);
}

#location 5
#vulnerability type NULL_DEREFERENCE
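The patch recreates the classes index before rehashing each renamed entry, so later lookups never run against a stale or missing table. The same rebuild-then-repopulate idea in a toy, self-contained form (ConstantTable is hypothetical, not the Javassist type):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ConstantTable {
        private final List<String> items = new ArrayList<>();
        private Map<String, Integer> index = new HashMap<>();

        public void add(String name) {
            items.add(name);
            index.put(name, items.size() - 1);
        }

        // After renaming, every cached key is stale, so the index is recreated
        // first and then repopulated entry by entry -- never left null or partial.
        public void renameAll(Map<String, String> renames) {
            index = new HashMap<>(items.size() * 2);
            for (int i = 0; i < items.size(); i++) {
                String renamed = renames.getOrDefault(items.get(i), items.get(i));
                items.set(i, renamed);
                index.put(renamed, i);
            }
        }

        public Integer indexOf(String name) {
            return index.get(name);
        }
    }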
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    CtField f = null;
    boolean is_static = false;
    if (expr instanceof Member) {
        String name = ((Member)expr).get();
        try {
            f = thisClass.getField(name);
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }

        is_static = Modifier.isStatic(f.getModifiers());
        if (!is_static)
            if (inStaticMethod)
                throw new CompileError(
                        "not available in a static method: " + name);
            else
                bytecode.addAload(0);       // this
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER) {
            f = lookupJavaField(((Symbol)e.oprand1()).get(), (Symbol)e.oprand2());
            is_static = true;
        }
        else if (op == '.') {
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    f = lookupJvmField(className, (Symbol)e.oprand2());
                else
                    badLvalue();

                is_static = Modifier.isStatic(f.getModifiers());
                if (is_static)
                    bytecode.addOpcode(POP);
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                Symbol fname = (Symbol)e.oprand2();
                // it should be a static field.
                try {
                    f = lookupJvmField(nfe.getField(), fname);
                    is_static = true;
                }
                catch (CompileError ce) {
                    // EXPR might be part of a qualified class name.
                    throw new NoFieldException(nfe.getField() + "/" + fname.get(), expr);
                }
            }
        }
        else
            badLvalue();
    }
    else
        badLvalue();

    resultStatic = is_static;
    return f;
}

#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
    CtField f = null;
    boolean is_static = false;
    if (expr instanceof Member) {
        String name = ((Member)expr).get();
        try {
            f = thisClass.getField(name);
        }
        catch (NotFoundException e) {
            // EXPR might be part of a static member access?
            throw new NoFieldException(name, expr);
        }

        is_static = Modifier.isStatic(f.getModifiers());
        if (!is_static)
            if (inStaticMethod)
                throw new CompileError(
                        "not available in a static method: " + name);
            else
                bytecode.addAload(0);       // this
    }
    else if (expr instanceof Expr) {
        Expr e = (Expr)expr;
        int op = e.getOperator();
        if (op == MEMBER) {
            f = lookupField((ASTList)e.oprand1(), (Symbol)e.oprand2());
            is_static = true;
        }
        else if (op == '.') {
            try {
                e.oprand1().accept(this);
                if (exprType == CLASS && arrayDim == 0)
                    f = lookupField(className, (Symbol)e.oprand2());
                else
                    badLvalue();

                is_static = Modifier.isStatic(f.getModifiers());
                if (is_static)
                    bytecode.addOpcode(POP);
            }
            catch (NoFieldException nfe) {
                if (nfe.getExpr() != e.oprand1())
                    throw nfe;

                Symbol fname = (Symbol)e.oprand2();
                // it should be a static field.
                try {
                    f = lookupField(nfe.getField(), fname);
                    is_static = true;
                }
                catch (CompileError ce) {
                    // EXPR might be part of a qualified class name.
                    throw new NoFieldException(nfe.getField() + "/" + fname.get(), expr);
                }
            }
        }
        else
            badLvalue();
    }
    else
        badLvalue();

    resultStatic = is_static;
    return f;
}

#location 26
#vulnerability type NULL_DEREFERENCE