repo | file | code | file_length | avg_line_length | max_line_length | extension_type |
---|---|---|---|---|---|---|
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/common/valuetypes/BooleanParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.common.valuetypes;
import com.googlecode.cqengine.query.parser.common.ValueParser;
/**
* @author Niall Gallagher
*/
public class BooleanParser extends ValueParser<Boolean> {
static final String TRUE_STR = Boolean.TRUE.toString();
static final String FALSE_STR = Boolean.FALSE.toString();
@Override
public Boolean parse(Class<? extends Boolean> valueType, String stringValue) {
if (TRUE_STR.equalsIgnoreCase(stringValue)) {
return true;
}
else if (FALSE_STR.equalsIgnoreCase(stringValue)) {
return false;
}
else {
throw new IllegalStateException("Could not parse value as boolean: " + stringValue);
}
}
}
| 1,363 | 32.268293 | 96 | java |
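All of the value-type parsers in this package follow the same shape: extend `ValueParser<T>` and override `parse(...)` to turn the raw token into the target type. As a hedged illustration of writing an additional parser in that style, the sketch below defines a hypothetical `LocalDateParser` (the class name and the assumption of ISO-8601 input are mine, not part of CQEngine):

```java
import com.googlecode.cqengine.query.parser.common.ValueParser;
import java.time.LocalDate;

/**
 * Hypothetical sketch: a custom value parser written in the same style as BooleanParser above.
 * Not part of CQEngine; shown only to illustrate the ValueParser pattern.
 */
public class LocalDateParser extends ValueParser<LocalDate> {

    @Override
    public LocalDate parse(Class<? extends LocalDate> valueType, String stringValue) {
        // Assumes ISO-8601 input such as "2015-05-25"; LocalDate.parse throws DateTimeParseException otherwise.
        return LocalDate.parse(stringValue);
    }
}
```

Such a parser would presumably be registered on a `QueryParser` via `registerValueParser(LocalDate.class, new LocalDateParser())`, mirroring how `SQLParser` registers its `StringParser` in its constructor further below.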
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/common/valuetypes/DoubleParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.common.valuetypes;
import com.googlecode.cqengine.query.parser.common.ValueParser;
/**
* @author Niall Gallagher
*/
public class DoubleParser extends ValueParser<Double> {
@Override
public Double parse(Class<? extends Double> valueType, String stringValue) {
return Double.valueOf(stringValue);
}
}
| 973 | 31.466667 | 80 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/common/valuetypes/FloatParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.common.valuetypes;
import com.googlecode.cqengine.query.parser.common.ValueParser;
/**
* @author Niall Gallagher
*/
public class FloatParser extends ValueParser<Float> {
@Override
public Float parse(Class<? extends Float> valueType, String stringValue) {
return Float.valueOf(stringValue);
}
}
| 968 | 31.3 | 78 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/common/valuetypes/CharacterParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.common.valuetypes;
import com.googlecode.cqengine.query.parser.common.ValueParser;
/**
* @author Niall Gallagher
*/
public class CharacterParser extends ValueParser<Character> {
@Override
public Character parse(Class<? extends Character> valueType, String stringValue) {
if (stringValue.length() != 1) {
throw new IllegalArgumentException("Could not parse value as Character, expected a single character: " + stringValue);
}
return stringValue.charAt(0);
}
}
| 1,080 | 31.757576 | 86 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/common/valuetypes/BigDecimalParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.common.valuetypes;
import com.googlecode.cqengine.query.parser.common.ValueParser;
import java.math.BigDecimal;
/**
* @author Niall Gallagher
*/
public class BigDecimalParser extends ValueParser<BigDecimal> {
@Override
public BigDecimal parse(Class<? extends BigDecimal> valueType, String stringValue) {
return new BigDecimal(stringValue);
}
}
| 1,019 | 30.875 | 88 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/sql/SQLParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.sql;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.query.parser.common.InvalidQueryException;
import com.googlecode.cqengine.query.parser.common.ParseResult;
import com.googlecode.cqengine.query.parser.common.QueryParser;
import com.googlecode.cqengine.query.parser.sql.grammar.SQLGrammarLexer;
import com.googlecode.cqengine.query.parser.sql.grammar.SQLGrammarParser;
import com.googlecode.cqengine.query.parser.sql.support.FallbackValueParser;
import com.googlecode.cqengine.query.parser.sql.support.SQLAntlrListener;
import com.googlecode.cqengine.query.parser.sql.support.StringParser;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import java.util.Map;
/**
* A parser for SQL queries.
*
* @author Niall Gallagher
*/
public class SQLParser<O> extends QueryParser<O> {
public SQLParser(Class<O> objectType) {
super(objectType);
StringParser stringParser = new StringParser();
super.registerValueParser(String.class, stringParser);
super.registerFallbackValueParser(new FallbackValueParser(stringParser));
}
@Override
public ParseResult<O> parse(String query) {
try {
if (query == null) {
throw new IllegalArgumentException("Query was null");
}
SQLGrammarLexer lexer = new SQLGrammarLexer(new ANTLRInputStream(query));
lexer.removeErrorListeners();
lexer.addErrorListener(SYNTAX_ERROR_LISTENER);
CommonTokenStream tokens = new CommonTokenStream(lexer);
SQLGrammarParser parser = new SQLGrammarParser(tokens);
parser.removeErrorListeners();
parser.addErrorListener(SYNTAX_ERROR_LISTENER);
SQLGrammarParser.StartContext queryContext = parser.start();
ParseTreeWalker walker = new ParseTreeWalker();
SQLAntlrListener<O> listener = new SQLAntlrListener<O>(this);
walker.walk(listener, queryContext);
return new ParseResult<O>(listener.getParsedQuery(), listener.getQueryOptions());
}
catch (InvalidQueryException e) {
throw e;
}
catch (Exception e) {
throw new InvalidQueryException("Failed to parse query", e);
}
}
/**
* Creates a new SQLParser for the given POJO class.
* @param pojoClass The type of object stored in the collection
* @return a new SQLParser for the given POJO class
*/
public static <O> SQLParser<O> forPojo(Class<O> pojoClass) {
return new SQLParser<O>(pojoClass);
}
/**
* Creates a new SQLParser for the given POJO class, and registers the given attributes with it.
* @param pojoClass The type of object stored in the collection
* @param attributes The attributes to register with the parser
* @return a new SQLParser for the given POJO class
*/
public static <O> SQLParser<O> forPojoWithAttributes(Class<O> pojoClass, Map<String, ? extends Attribute<O, ?>> attributes) {
SQLParser<O> parser = forPojo(pojoClass);
parser.registerAttributes(attributes);
return parser;
}
}
| 3,837 | 38.163265 | 129 | java |
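A usage sketch for the factory methods above (hedged: the generic helper below is mine, and the `getQuery()`/`getQueryOptions()` getter names on `ParseResult` are assumed from the values passed to its constructor; `IndexedCollection.retrieve(query, queryOptions)` is standard CQEngine usage):

```java
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.query.parser.common.ParseResult;
import com.googlecode.cqengine.query.parser.sql.SQLParser;
import com.googlecode.cqengine.resultset.ResultSet;
import java.util.Map;

/** Hypothetical helper showing how SQLParser is typically wired up and used. */
public class SQLParserUsageSketch {

    static <O> ResultSet<O> runSql(IndexedCollection<O> collection,
                                   Class<O> pojoClass,
                                   Map<String, ? extends Attribute<O, ?>> attributes,
                                   String sql) {
        // Create a parser for the POJO type and register its attributes by name...
        SQLParser<O> parser = SQLParser.forPojoWithAttributes(pojoClass, attributes);

        // Parse the SQL string into a CQEngine query plus query options (e.g. an ORDER BY clause)...
        ParseResult<O> parseResult = parser.parse(sql);

        // Run the parsed query against the collection...
        return collection.retrieve(parseResult.getQuery(), parseResult.getQueryOptions());
    }
}
```

For example, `runSql(cars, Car.class, carAttributes, "SELECT * FROM cars WHERE manufacturer = 'Ford' ORDER BY price DESC")`, where `cars`, `Car` and `carAttributes` are placeholders.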
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/sql/support/FallbackValueParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.sql.support;
import com.googlecode.cqengine.query.parser.common.ValueParser;
import java.lang.reflect.Method;
/**
* Parses values using a static {@code valueOf()} method in the type's class.
* <p/>
* Created by npgall on 25/05/2015.
*/
public class FallbackValueParser extends ValueParser<Object> {
final StringParser stringParser;
public FallbackValueParser(StringParser stringParser) {
this.stringParser = stringParser;
}
@Override
protected Object parse(Class<?> valueType, String stringValue) {
try {
stringValue = stringParser.parse(String.class, stringValue);
Method valueOf = valueType.getMethod("valueOf", String.class);
return valueType.cast(valueOf.invoke(null, stringValue));
}
catch (Exception e) {
throw new IllegalStateException("Failed to parse value using a valueOf() method in class '" + valueType.getName() + "': " + stringValue, e);
}
}
}
| 1,629 | 33.680851 | 152 | java |
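The fallback strategy above is plain reflection over the `valueOf(String)` convention that wrapper types and enums follow. A stripped-down, CQEngine-independent sketch of the same idea:

```java
import java.lang.reflect.Method;
import java.util.concurrent.TimeUnit;

/** Standalone sketch of the valueOf(String) reflection used by FallbackValueParser; not CQEngine code. */
public class ValueOfReflectionSketch {

    static Object parseViaValueOf(Class<?> valueType, String stringValue) {
        try {
            // Look up the public static valueOf(String) factory on the target type...
            Method valueOf = valueType.getMethod("valueOf", String.class);
            // ...invoke it (null receiver, since it is static) and cast the result to the target type.
            return valueType.cast(valueOf.invoke(null, stringValue));
        } catch (Exception e) {
            throw new IllegalStateException("No usable valueOf(String) on " + valueType.getName() + ": " + stringValue, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(parseViaValueOf(Integer.class, "42"));       // Integer 42
        System.out.println(parseViaValueOf(TimeUnit.class, "SECONDS")); // enum constant SECONDS
    }
}
```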
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/sql/support/StringParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.sql.support;
import com.googlecode.cqengine.query.parser.common.ValueParser;
/**
* @author Niall Gallagher
*/
public class StringParser extends ValueParser<String> {
@Override
public String parse(Class<? extends String> valueType, String stringValue) {
return stripQuotes(stringValue);
}
public static String stripQuotes(String stringValue) {
int length = stringValue.length();
// Strip leading and trailing single quotes...
if (length >= 2 && stringValue.charAt(0) == '\'' && stringValue.charAt(length - 1) == '\'') {
stringValue = stringValue.substring(1, length - 1);
}
// Convert double single quotes (which is how a single quote is escaped), to one single quote...
stringValue = stringValue.replace("''", "'");
return stringValue;
}
}
| 1,491 | 35.390244 | 104 | java |
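Concretely, `stripQuotes` both removes the surrounding single quotes of a SQL string literal and unescapes the doubled single quotes inside it. A small standalone check of that behaviour (not part of the codebase; expected output in the comments):

```java
import com.googlecode.cqengine.query.parser.sql.support.StringParser;

/** Tiny illustration of StringParser.stripQuotes(). */
public class StripQuotesExample {
    public static void main(String[] args) {
        System.out.println(StringParser.stripQuotes("'Ford'"));      // Ford
        System.out.println(StringParser.stripQuotes("'O''Brien'"));  // O'Brien
    }
}
```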
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/sql/support/SQLAntlrListener.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.sql.support;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.query.Query;
import com.googlecode.cqengine.query.QueryFactory;
import com.googlecode.cqengine.query.logical.And;
import com.googlecode.cqengine.query.logical.Or;
import com.googlecode.cqengine.query.option.AttributeOrder;
import com.googlecode.cqengine.query.option.OrderByOption;
import com.googlecode.cqengine.query.option.QueryOptions;
import com.googlecode.cqengine.query.parser.common.QueryParser;
import com.googlecode.cqengine.query.parser.sql.grammar.SQLGrammarBaseListener;
import com.googlecode.cqengine.query.parser.sql.grammar.SQLGrammarParser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import java.util.*;
import static com.googlecode.cqengine.query.parser.common.ParserUtils.*;
/**
* @author Niall Gallagher
*/
public class SQLAntlrListener<O> extends SQLGrammarBaseListener {
/*
NOTE: this class depends on classes auto-generated by the antlr4-maven-plugin.
Run "mvn clean compile" to generate those classes.
*/
protected final QueryParser<O> queryParser;
// A map of parent context, to parsed child queries belonging to that context...
protected final Map<ParserRuleContext, Collection<Query<O>>> childQueries = new HashMap<ParserRuleContext, Collection<Query<O>>>();
// The parsed orderBy clause, if found...
protected OrderByOption<O> orderByOption = null;
protected int numQueriesEncountered = 0;
protected int numQueriesParsed = 0;
public SQLAntlrListener(QueryParser<O> queryParser) {
this.queryParser = queryParser;
}
// ======== Handler methods for each type of query defined in the antlr grammar... ========
@Override
public void exitAndQuery(SQLGrammarParser.AndQueryContext ctx) {
addParsedQuery(ctx, new And<O>(childQueries.get(ctx)));
}
@Override
public void exitOrQuery(SQLGrammarParser.OrQueryContext ctx) {
addParsedQuery(ctx, new Or<O>(childQueries.get(ctx)));
}
@Override
public void exitNotQuery(SQLGrammarParser.NotQueryContext ctx) {
addParsedQuery(ctx, QueryFactory.not(childQueries.get(ctx).iterator().next()));
}
@Override
public void exitEqualQuery(SQLGrammarParser.EqualQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
Object value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.equal(attribute, value));
}
@Override
public void exitNotEqualQuery(SQLGrammarParser.NotEqualQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
Object value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.not(QueryFactory.equal(attribute, value)));
}
@Override
@SuppressWarnings("unchecked")
public void exitLessThanOrEqualToQuery(SQLGrammarParser.LessThanOrEqualToQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
Comparable value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.lessThanOrEqualTo(attribute, value));
}
@Override
@SuppressWarnings("unchecked")
public void exitLessThanQuery(SQLGrammarParser.LessThanQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
Comparable value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.lessThan(attribute, value));
}
@Override
@SuppressWarnings("unchecked")
public void exitGreaterThanOrEqualToQuery(SQLGrammarParser.GreaterThanOrEqualToQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
Comparable value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.greaterThanOrEqualTo(attribute, value));
}
@Override
@SuppressWarnings("unchecked")
public void exitGreaterThanQuery(SQLGrammarParser.GreaterThanQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
Comparable value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.greaterThan(attribute, value));
}
@Override
@SuppressWarnings("unchecked")
public void exitBetweenQuery(SQLGrammarParser.BetweenQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
List<? extends ParseTree> queryParameters = ctx.queryParameter();
Comparable lowerValue = queryParser.parseValue(attribute, queryParameters.get(0));
Comparable upperValue = queryParser.parseValue(attribute, queryParameters.get(1));
addParsedQuery(ctx, QueryFactory.between(attribute, lowerValue, upperValue));
}
@Override
@SuppressWarnings("unchecked")
public void exitNotBetweenQuery(SQLGrammarParser.NotBetweenQueryContext ctx) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(ctx.attributeName(), Comparable.class);
List<? extends ParseTree> queryParameters = ctx.queryParameter();
Comparable lowerValue = queryParser.parseValue(attribute, queryParameters.get(0));
Comparable upperValue = queryParser.parseValue(attribute, queryParameters.get(1));
addParsedQuery(ctx, QueryFactory.not(QueryFactory.between(attribute, lowerValue, upperValue)));
}
@Override
public void exitInQuery(SQLGrammarParser.InQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
List<? extends ParseTree> queryParameters = ctx.queryParameter();
Collection<Object> values = new ArrayList<Object>(queryParameters.size());
for (ParseTree queryParameter : queryParameters) {
Object value = queryParser.parseValue(attribute, queryParameter);
values.add(value);
}
addParsedQuery(ctx, QueryFactory.in(attribute, values));
}
@Override
public void exitNotInQuery(SQLGrammarParser.NotInQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
List<? extends ParseTree> queryParameters = ctx.queryParameter();
Collection<Object> values = new ArrayList<Object>(queryParameters.size());
for (ParseTree queryParameter : queryParameters) {
Object value = queryParser.parseValue(attribute, queryParameter);
values.add(value);
}
addParsedQuery(ctx, QueryFactory.not(QueryFactory.in(attribute, values)));
}
@Override
public void exitStartsWithQuery(SQLGrammarParser.StartsWithQueryContext ctx) {
Attribute<O, String> attribute = queryParser.getAttribute(ctx.attributeName(), String.class);
String value = queryParser.parseValue(attribute, ctx.queryParameterTrailingPercent());
value = value.substring(0, value.length() - 1);
addParsedQuery(ctx, QueryFactory.startsWith(attribute, value));
}
@Override
public void exitIsPrefixOfQuery(SQLGrammarParser.IsPrefixOfQueryContext ctx) {
Attribute<O, String> attribute = queryParser.getAttribute(ctx.attributeName(), String.class);
String value = queryParser.parseValue(attribute, ctx.queryParameter());
addParsedQuery(ctx, QueryFactory.isPrefixOf(attribute, value));
}
@Override
public void exitEndsWithQuery(SQLGrammarParser.EndsWithQueryContext ctx) {
Attribute<O, String> attribute = queryParser.getAttribute(ctx.attributeName(), String.class);
String value = queryParser.parseValue(attribute, ctx.queryParameterLeadingPercent());
value = value.substring(1, value.length());
addParsedQuery(ctx, QueryFactory.endsWith(attribute, value));
}
@Override
public void exitContainsQuery(SQLGrammarParser.ContainsQueryContext ctx) {
Attribute<O, String> attribute = queryParser.getAttribute(ctx.attributeName(), String.class);
String value = queryParser.parseValue(attribute, ctx.queryParameterLeadingAndTrailingPercent());
value = value.substring(1, value.length() - 1);
addParsedQuery(ctx, QueryFactory.contains(attribute, value));
}
@Override
public void exitHasQuery(SQLGrammarParser.HasQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
addParsedQuery(ctx, QueryFactory.has(attribute));
}
@Override
public void exitNotHasQuery(SQLGrammarParser.NotHasQueryContext ctx) {
Attribute<O, Object> attribute = queryParser.getAttribute(ctx.attributeName(), Object.class);
addParsedQuery(ctx, QueryFactory.not(QueryFactory.has(attribute)));
}
/** This handler is called for all queries, allows us to validate that no handlers are missing. */
@Override
public void exitQuery(SQLGrammarParser.QueryContext ctx) {
numQueriesEncountered++;
validateAllQueriesParsed(numQueriesEncountered, numQueriesParsed);
}
@Override
public void exitOrderByClause(SQLGrammarParser.OrderByClauseContext ctx) {
List<AttributeOrder<O>> attributeOrders = new ArrayList<AttributeOrder<O>>();
for (SQLGrammarParser.AttributeOrderContext orderContext : ctx.attributeOrder()) {
Attribute<O, Comparable> attribute = queryParser.getAttribute(orderContext.attributeName(), Comparable.class);
boolean descending = orderContext.direction() != null && orderContext.direction().K_DESC() != null;
attributeOrders.add(new AttributeOrder<O>(attribute, descending));
}
this.orderByOption = QueryFactory.orderBy(attributeOrders);
}
// ======== Utility methods... ========
/**
* Adds the given query to a list of child queries which have not yet been wrapped in a parent query.
*/
void addParsedQuery(ParserRuleContext currentContext, Query<O> parsedQuery) {
// Retrieve the possibly null parent query...
ParserRuleContext parentContext = getParentContextOfType(currentContext, getAndOrNotContextClasses());
Collection<Query<O>> childrenOfParent = this.childQueries.get(parentContext);
if (childrenOfParent == null) {
childrenOfParent = new ArrayList<Query<O>>();
this.childQueries.put(parentContext, childrenOfParent); // parentContext will be null if this is root query
}
childrenOfParent.add(parsedQuery);
numQueriesParsed++;
}
/**
* Can be called when parsing has finished, to retrieve the parsed query.
*/
public Query<O> getParsedQuery() {
Collection<Query<O>> rootQuery = childQueries.get(null);
if (rootQuery == null) {
// There was no WHERE clause...
return QueryFactory.all(this.queryParser.getObjectType());
}
validateExpectedNumberOfChildQueries(1, rootQuery.size());
return rootQuery.iterator().next();
}
/**
* Can be called when parsing has finished, to retrieve the {@link QueryOptions}, which may include an
* {@link OrderByOption} if found in the string query.
*
* @return The parsed {@link QueryOptions}
*/
public QueryOptions getQueryOptions() {
OrderByOption<O> orderByOption = this.orderByOption;
return orderByOption != null ? QueryFactory.queryOptions(orderByOption) : QueryFactory.noQueryOptions();
}
protected Class[] getAndOrNotContextClasses() {
return new Class[] {SQLGrammarParser.AndQueryContext.class, SQLGrammarParser.OrQueryContext.class, SQLGrammarParser.NotQueryContext.class};
}
}
| 12,749 | 45.875 | 147 | java |
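The central bookkeeping in this listener is the `childQueries` map: every parsed leaf query is filed under its nearest enclosing AND/OR/NOT context, and a query with no such ancestor ends up under the `null` key, which `getParsedQuery()` then treats as the root. A CQEngine-independent sketch of just that grouping idea (the generic class below is an illustration, not part of the codebase):

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Standalone sketch of the parent-context grouping used by SQLAntlrListener; P = parser context, Q = parsed query. */
public class ChildQueryGroupingSketch<P, Q> {

    // Parsed child queries, keyed by the parent context they belong to (null key = root, i.e. no enclosing AND/OR/NOT)...
    private final Map<P, List<Q>> childQueries = new HashMap<>();

    void addParsedQuery(P parentContextOrNull, Q parsedQuery) {
        // HashMap permits a null key, so root-level queries simply accumulate in the null bucket...
        childQueries.computeIfAbsent(parentContextOrNull, k -> new ArrayList<>()).add(parsedQuery);
    }

    List<Q> getRootQueries() {
        // Queries that were never wrapped by a parent context; an empty list means there was no WHERE clause...
        return childQueries.getOrDefault(null, Collections.emptyList());
    }
}
```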
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/query/parser/sql/support/DateMathParser.java | /**
* Copyright 2012-2015 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.parser.sql.support;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
* A variant of {@link com.googlecode.cqengine.query.parser.cqn.support.DateMathParser} which can be used with
* the SQL dialect.
*
* @author npgall
*/
public class DateMathParser extends com.googlecode.cqengine.query.parser.cqn.support.DateMathParser {
public DateMathParser() {
}
public DateMathParser(Date now) {
super(now);
}
public DateMathParser(TimeZone timeZone, Locale locale) {
super(timeZone, locale);
}
public DateMathParser(TimeZone timeZone, Locale locale, Date now) {
super(timeZone, locale, now);
}
@Override
protected String stripQuotes(String stringValue) {
return StringParser.stripQuotes(stringValue);
}
}
| 1,454 | 28.1 | 110 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/metadata/MetadataEngine.java | /**
* Copyright 2012-2019 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.metadata;
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.index.AttributeIndex;
import com.googlecode.cqengine.index.Index;
import com.googlecode.cqengine.index.support.KeyStatisticsAttributeIndex;
import com.googlecode.cqengine.index.support.KeyStatisticsIndex;
import com.googlecode.cqengine.index.support.SortedKeyStatisticsAttributeIndex;
import com.googlecode.cqengine.index.support.SortedKeyStatisticsIndex;
import com.googlecode.cqengine.query.option.QueryOptions;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
* Provides access to metadata for attributes, obtained from indexes which have been added to the collection.
* Example metadata:
* <ul>
* <li>
* frequency distributions (counts of the occurrences of each distinct value of an attribute in the collection)
* </li>
* <li>
* count the number of objects in the collection whose attribute has a specific value
* </li>
* <li>
* obtain all of the distinct values of an attribute
* </li>
* </ul>
* <p>
* The {@link #getAttributeMetadata(Attribute)} method returns an {@link AttributeMetadata} accessor object,
* which can provide access to basic metadata as discussed above for a given attribute, in unsorted order.
* This requires in advance, an index which implements the {@link KeyStatisticsAttributeIndex} interface,
* to be added to the collection on the given attribute. Most indexes implement that interface.
* <p>
* The {@link #getSortedAttributeMetadata(Attribute)} method returns a {@link SortedAttributeMetadata} accessor object,
* which can provide access to additional metadata and in sorted order. It allows attribute values to be traversed
* in ascending or descending order, and it supports range queries. This requires in advance, an index which implements
* the {@link SortedKeyStatisticsAttributeIndex} interface, to be added to the collection on the given attribute.
* That interface is implemented by many indexes.
* <p>
* This object can be accessed by calling {@link IndexedCollection#getMetadataEngine()}.
*/
public class MetadataEngine<O> {
private final IndexedCollection<O> indexedCollection;
private final Supplier<QueryOptions> openResourcesHandler;
private final Consumer<QueryOptions> closeResourcesHandler;
public MetadataEngine(IndexedCollection<O> indexedCollection, Supplier<QueryOptions> openResourcesHandler, Consumer<QueryOptions> closeResourcesHandler) {
this.indexedCollection = indexedCollection;
this.openResourcesHandler = openResourcesHandler;
this.closeResourcesHandler = closeResourcesHandler;
}
/**
* Returns an {@link AttributeMetadata} accessor object, which can provide access to basic metadata
* for a given attribute, in unsorted order.
* <p>
* This requires in advance, an index which implements the {@link KeyStatisticsAttributeIndex} interface,
* to be added to the collection on the given attribute.
*
* @param attribute The attribute for which metadata is required
* @return an {@link AttributeMetadata} accessor object
* @throws IllegalStateException if no suitable index has been added to the collection
*/
public <A> AttributeMetadata<A, O> getAttributeMetadata(Attribute<O, A> attribute) {
@SuppressWarnings("unchecked")
KeyStatisticsIndex<A, O> index = getIndexOnAttribute(KeyStatisticsAttributeIndex.class, attribute);
return new AttributeMetadata<>(index, openResourcesHandler, closeResourcesHandler);
}
/**
* Returns a {@link SortedAttributeMetadata} accessor object, which can provide access to metadata in sorted order
* for a given attribute. It allows attribute values to be traversed in ascending or descending order, and it
* supports range queries.
* <p>
* This requires in advance, an index which implements the {@link SortedKeyStatisticsAttributeIndex} interface,
* to be added to the collection on the given attribute.
*
* @param attribute The attribute for which metadata is required
* @return a {@link SortedAttributeMetadata} accessor object
* @throws IllegalStateException if no suitable index has been added to the collection
*/
public <A extends Comparable<A>> SortedAttributeMetadata<A, O> getSortedAttributeMetadata(Attribute<O, A> attribute) {
@SuppressWarnings("unchecked")
SortedKeyStatisticsIndex<A, O> index = getIndexOnAttribute(SortedKeyStatisticsAttributeIndex.class, attribute);
return new SortedAttributeMetadata<>(index, openResourcesHandler, closeResourcesHandler);
}
private <A, I extends AttributeIndex<A, O>> I getIndexOnAttribute(Class<I> indexType, Attribute<O, A> attribute) {
for (Index<O> index : indexedCollection.getIndexes()) {
if (indexType.isAssignableFrom(index.getClass())) {
I attributeIndex = indexType.cast(index);
if (attributeIndex.getAttribute().equals(attribute)) {
return attributeIndex;
}
}
}
throw new IllegalStateException("A " + indexType.getSimpleName() + " has not been added to the collection, and must be added first, to enable metadata to be examined for attribute: " + attribute);
}
}
| 6,046 | 50.245763 | 204 | java |
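A hedged usage sketch of the engine above (the generic helper is mine; per the Javadoc, the only precondition is that a `KeyStatisticsAttributeIndex`, which most CQEngine indexes implement, was already added on the attribute):

```java
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.metadata.AttributeMetadata;
import com.googlecode.cqengine.metadata.KeyFrequency;
import java.util.stream.Stream;

/** Hypothetical helper showing how attribute metadata is typically accessed. */
public class MetadataUsageSketch {

    static <O, A> void printFrequencyDistribution(IndexedCollection<O> collection, Attribute<O, A> attribute) {
        // Throws IllegalStateException if no suitable index was added on the attribute, as documented above...
        AttributeMetadata<A, O> metadata = collection.getMetadataEngine().getAttributeMetadata(attribute);

        // The returned streams hold index resources open, so close them (try-with-resources) when finished...
        try (Stream<KeyFrequency<A>> distribution = metadata.getFrequencyDistribution()) {
            distribution.forEach(kf -> System.out.println(kf.getKey() + " -> " + kf.getFrequency()));
        }
    }
}
```

Closing the stream matters because, as `AttributeMetadata.asStream(...)` further below shows, resource release is wired into the stream's `onClose` handlers.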
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/metadata/KeyFrequency.java | package com.googlecode.cqengine.metadata;
/**
* Represents the frequency (i.e. the count of the number of occurrences of) a given key.
*/
public interface KeyFrequency<A> {
A getKey();
int getFrequency();
}
| 220 | 17.416667 | 89 | java |
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/metadata/SortedAttributeMetadata.java | /**
* Copyright 2012-2019 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.metadata;
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.index.support.KeyValue;
import com.googlecode.cqengine.index.support.SortedKeyStatisticsAttributeIndex;
import com.googlecode.cqengine.index.support.SortedKeyStatisticsIndex;
import com.googlecode.cqengine.query.option.QueryOptions;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
* Provides access to metadata in sorted order for a given attribute. Allows attribute values to be traversed
* in ascending or descending order, and supports range queries. See {@link MetadataEngine} for more details.
* <p>
* This requires in advance, an index which implements the {@link SortedKeyStatisticsAttributeIndex} interface,
* to be added to the collection on the given attribute.
* <p>
* This object can be accessed first by calling {@link IndexedCollection#getMetadataEngine()} to access the
* {@link MetadataEngine}, and then by calling {@link MetadataEngine#getSortedAttributeMetadata(Attribute)} for a given
* attribute.
*/
public class SortedAttributeMetadata<A extends Comparable<A>, O> extends AttributeMetadata<A, O> {
private final SortedKeyStatisticsIndex<A, O> index;
SortedAttributeMetadata(SortedKeyStatisticsIndex<A, O> index, Supplier<QueryOptions> openResourcesHandler, Consumer<QueryOptions> closeResourcesHandler) {
super(index, openResourcesHandler, closeResourcesHandler);
this.index = index;
}
/**
* Returns the frequencies of distinct keys (a.k.a. attribute values) in the index, in ascending order.
* <p>
* The {@link KeyFrequency} objects encapsulate a key (a.k.a. attribute value), and the frequency (or count)
* of how many objects in the collection match that key.
*/
@Override
public Stream<KeyFrequency<A>> getFrequencyDistribution() {
return super.getFrequencyDistribution();
}
/**
* Returns the frequencies of distinct keys (a.k.a. attribute values) in the index, in descending order.
* <p>
* The {@link KeyFrequency} objects encapsulate a key (a.k.a. attribute value), and the frequency (or count)
* of how many objects in the collection match that key.
*/
public Stream<KeyFrequency<A>> getFrequencyDistributionDescending() {
QueryOptions queryOptions = openResources();
return asKeyFrequencyStream(queryOptions, index.getStatisticsForDistinctKeysDescending(queryOptions));
}
/**
* Returns the distinct keys in the index, in ascending order.
*/
@Override
public Stream<A> getDistinctKeys() {
return super.getDistinctKeys();
}
/**
* Returns the distinct keys in the index, in descending order.
*/
public Stream<A> getDistinctKeysDescending() {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getDistinctKeysDescending(queryOptions));
}
/**
* Returns the distinct keys in the index within an optional range, in ascending order.
*
* @param lowerBound The lower bound for the keys returned, or null if no lower bound should be applied
* @param lowerInclusive true if the lowerBound is inclusive, false if exclusive
* @param upperBound The upper bound for the keys returned, or null if no upper bound should be applied
* @param upperInclusive true if the upperBound is inclusive, false if exclusive
* @return The distinct keys in the index within an optional range, in ascending order
*/
public Stream<A> getDistinctKeys(A lowerBound, boolean lowerInclusive, A upperBound, boolean upperInclusive) {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getDistinctKeys(lowerBound, lowerInclusive, upperBound, upperInclusive, queryOptions));
}
/**
* Returns the distinct keys in the index within an optional range, in descending order.
*
* @param lowerBound The lower bound for the keys returned, or null if no lower bound should be applied
* @param lowerInclusive true if the lowerBound is inclusive, false if exclusive
* @param upperBound The upper bound for the keys returned, or null if no upper bound should be applied
* @param upperInclusive true if the upperBound is inclusive, false if exclusive
* @return The distinct keys in the index within an optional range, in descending order
*/
public Stream<A> getDistinctKeysDescending(A lowerBound, boolean lowerInclusive, A upperBound, boolean upperInclusive) {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getDistinctKeysDescending(lowerBound, lowerInclusive, upperBound, upperInclusive, queryOptions));
}
/**
* Returns the count of distinct keys in the index.
*/
@Override
public Integer getCountOfDistinctKeys() {
return super.getCountOfDistinctKeys();
}
/**
* Returns the number of objects in the index which match the given key.
*/
@Override
public Integer getCountForKey(A key) {
return super.getCountForKey(key);
}
/**
* Returns the keys (a.k.a. attribute values) and the objects they match in the index, in ascending order of key.
* <p>
* Note the same key will be returned multiple times if more than one object has the same key. Also the same object
* might be returned multiple times, each time for a different key, if the index is built on a multi-value attribute.
*
* @return The keys and objects they match in the index, in ascending order of key
*/
@Override
public Stream<KeyValue<A, O>> getKeysAndValues() {
return super.getKeysAndValues();
}
/**
* Returns the keys (a.k.a. attribute values) and the objects they match in the index, in descending order of key.
* <p>
* Note the same key will be returned multiple times if more than one object has the same key. Also the same object
* might be returned multiple times, each time for a different key, if the index is built on a multi-value attribute.
*
* @return The keys and objects they match in the index, in descending order of key
*/
public Stream<KeyValue<A, O>> getKeysAndValuesDescending() {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getKeysAndValuesDescending(queryOptions));
}
/**
* Returns the keys (a.k.a. attribute values) within an optional range, and the objects they match in the index,
* in ascending order of key.
* <p>
* Note the same key will be returned multiple times if more than one object has the same key. Also the same object
* might be returned multiple times, each time for a different key, if the index is built on a multi-value attribute.
*
* @param lowerBound The lower bound for the keys returned, or null if no lower bound should be applied
* @param lowerInclusive true if the lowerBound is inclusive, false if exclusive
* @param upperBound The upper bound for the keys returned, or null if no upper bound should be applied
* @param upperInclusive true if the upperBound is inclusive, false if exclusive
*
* @return The keys and objects they match in the index, in ascending order of key
*/
public Stream<KeyValue<A, O>> getKeysAndValues(A lowerBound, boolean lowerInclusive, A upperBound, boolean upperInclusive) {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getKeysAndValues(lowerBound, lowerInclusive, upperBound, upperInclusive, queryOptions));
}
/**
* Returns the keys (a.k.a. attribute values) within an optional range, and the objects they match in the index,
* in descending order of key.
* <p>
* Note the same key will be returned multiple times if more than one object has the same key. Also the same object
* might be returned multiple times, each time for a different key, if the index is built on a multi-value attribute.
*
* @param lowerBound The lower bound for the keys returned, or null if no lower bound should be applied
* @param lowerInclusive true if the lowerBound is inclusive, false if exclusive
* @param upperBound The upper bound for the keys returned, or null if no upper bound should be applied
* @param upperInclusive true if the lowerBound is inclusive, false if exclusive
*
* @return The keys and objects they match in the index, in descending order of key
*/
public Stream<KeyValue<A, O>> getKeysAndValuesDescending(A lowerBound, boolean lowerInclusive, A upperBound, boolean upperInclusive) {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getKeysAndValuesDescending(lowerBound, lowerInclusive, upperBound, upperInclusive, queryOptions));
}
}
| 9,637 | 47.432161 | 158 | java |
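A similar hedged sketch for the sorted variant above, exercising one of the range accessors (requires a `SortedKeyStatisticsAttributeIndex` on the attribute; the helper itself is an illustration):

```java
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.metadata.SortedAttributeMetadata;
import java.util.stream.Stream;

/** Hypothetical helper showing a range query over sorted attribute metadata. */
public class SortedMetadataUsageSketch {

    static <O> void printKeysBetween10And20(IndexedCollection<O> collection, Attribute<O, Integer> attribute) {
        SortedAttributeMetadata<Integer, O> metadata =
                collection.getMetadataEngine().getSortedAttributeMetadata(attribute);

        // Distinct keys k with 10 <= k < 20, in ascending order; close the stream to release index resources...
        try (Stream<Integer> keys = metadata.getDistinctKeys(10, true, 20, false)) {
            keys.forEach(System.out::println);
        }
    }
}
```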
cqengine | cqengine-master/code/src/main/java/com/googlecode/cqengine/metadata/AttributeMetadata.java | package com.googlecode.cqengine.metadata;
import com.googlecode.cqengine.IndexedCollection;
import com.googlecode.cqengine.attribute.Attribute;
import com.googlecode.cqengine.index.support.*;
import com.googlecode.cqengine.query.option.QueryOptions;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
* Provides access to basic metadata for a given attribute, in unsorted order.
* <p>
* This requires in advance, an index which implements the {@link KeyStatisticsAttributeIndex} interface,
* to be added to the collection on the given attribute. See {@link MetadataEngine} for more details.
* <p>
* This object can be accessed first by calling {@link IndexedCollection#getMetadataEngine()} to access the
* {@link MetadataEngine}, and then by calling {@link MetadataEngine#getAttributeMetadata(Attribute)} for a given
* attribute.
*/
public class AttributeMetadata<A, O> {
private final KeyStatisticsIndex<A, O> index;
private final Supplier<QueryOptions> openResourcesHandler;
private final Consumer<QueryOptions> closeResourcesHandler;
AttributeMetadata(KeyStatisticsIndex<A, O> index, Supplier<QueryOptions> openResourcesHandler, Consumer<QueryOptions> closeResourcesHandler) {
this.index = index;
this.openResourcesHandler = openResourcesHandler;
this.closeResourcesHandler = closeResourcesHandler;
}
/**
* Returns the frequencies of distinct keys (a.k.a. attribute values) in the index.
* <p>
* The {@link KeyFrequency} objects encapsulate a key (a.k.a. attribute value), and the frequency (or count)
* of how many objects in the collection match that key.
*/
public Stream<KeyFrequency<A>> getFrequencyDistribution() {
QueryOptions queryOptions = openResources();
return asKeyFrequencyStream(queryOptions, index.getStatisticsForDistinctKeys(queryOptions));
}
/**
* Returns the distinct keys in the index.
*/
public Stream<A> getDistinctKeys() {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getDistinctKeys(queryOptions));
}
/**
* Returns the count of distinct keys in the index.
*/
public Integer getCountOfDistinctKeys() {
QueryOptions queryOptions = openResources();
try {
return index.getCountOfDistinctKeys(queryOptions);
}
finally {
closeResources(queryOptions);
}
}
/**
* Returns the number of objects in the index which match the given key.
*/
public Integer getCountForKey(A key) {
QueryOptions queryOptions = openResources();
try {
return index.getCountForKey(key, queryOptions);
}
finally {
closeResources(queryOptions);
}
}
/**
* Returns the keys (a.k.a. attribute values) and the objects they match in the index.
* <p>
* Note the same key will be returned multiple times if more than one object has the same key. Also the same object
* might be returned multiple times, each time for a different key, if the index is built on a multi-value attribute.
*
* @return The keys and objects they match in the index
*/
public Stream<KeyValue<A, O>> getKeysAndValues() {
QueryOptions queryOptions = openResources();
return asStream(queryOptions, index.getKeysAndValues(queryOptions));
}
@SuppressWarnings("unchecked")
protected Stream<KeyFrequency<A>> asKeyFrequencyStream(QueryOptions queryOptions, CloseableIterable<KeyStatistics<A>> iterable) {
Stream<? extends KeyFrequency<A>> keyStatisticsStream = asStream(queryOptions, iterable);
return (Stream<KeyFrequency<A>>) keyStatisticsStream;
}
protected <T> Stream<T> asStream(QueryOptions queryOptions, CloseableIterable<T> iterable) {
CloseableIterator<T> iterator = iterable.iterator();
Spliterator<T> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
return StreamSupport.stream(spliterator, false)
.onClose(iterator::close) // ..when the stream is closed, first close the iterator
.onClose(() -> closeResources(queryOptions)); // ..then close any other resources which were acquired
}
protected QueryOptions openResources() {
return openResourcesHandler.get();
}
protected void closeResources(QueryOptions queryOptions) {
closeResourcesHandler.accept(queryOptions);
}
}
| 4,676 | 38.974359 | 146 | java |
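The `asStream` helper above is a general pattern for exposing a resource-backed iterator as a `java.util.stream.Stream`: wrap it in an unknown-size spliterator, then chain `onClose` handlers so closing the stream releases the underlying resources. A standalone sketch of the same bridge (independent of CQEngine; the simplified `CloseableIterator` interface below is a stand-in for the real one):

```java
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

/** Standalone sketch of the iterator-to-Stream bridge used by AttributeMetadata.asStream(). */
public class StreamBridgeSketch {

    /** Any iterator that also holds a closeable resource (a simplified stand-in for CloseableIterator). */
    interface CloseableIterator<T> extends Iterator<T>, AutoCloseable {
        @Override void close();
    }

    static <T> Stream<T> asStream(CloseableIterator<T> iterator, Runnable releaseOtherResources) {
        Spliterator<T> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
        return StreamSupport.stream(spliterator, false)
                .onClose(iterator::close)          // ..when the stream is closed, first close the iterator
                .onClose(releaseOtherResources);   // ..then release anything else that was acquired
    }
}
```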
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/ModularityOptimizer_DynaMo.java | package org.dzhuang.dynamic.DynaMo;
/**
* ModularityOptimizer
*
* @author Ludo Waltman
* @author Nees Jan van Eck
* @author Di Zhuang
* @version 09/04/18
*/
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import it.unimi.dsi.fastutil.ints.Int2BooleanOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
public class ModularityOptimizer_DynaMo{
public static double resolution_default=1.0;
public static int nRandomStarts_default=10;
public static int nIterations_default=10000;
public static long randomSeed_default=0;
public static void main(String[] args) throws IOException, ClassNotFoundException{
// runDynamicModularity("Cit-HepPh", 31);
// runDynamicModularity("Cit-HepTh", 25);
// runDynamicModularity("dblp_coauthorship", 31);
// runDynamicModularity("facebook", 28);
// runDynamicModularity("flickr", 24);
// runDynamicModularity("youtube", 33);
// runDynamicModularity("networks_us-101", 3991);
// runDynamicModularity("networks_i-80", 3991);
// runDynamicModularity("networks_lankershim", 3991);
// runDynamicModularity("networks_peachtree", 3991);
// runDynamicModularity("networks_us-101", 1000);
// runDynamicModularity("networks_i-80", 1000);
// runDynamicModularity("networks_lankershim", 1000);
// runDynamicModularity("networks_peachtree", 1000);
}
public static void runDynamicModularity(String dataSet, int nbatch) throws IOException, ClassNotFoundException{
String DyNet="data/"+dataSet+"/ntwk2/";
String intNet="data/"+dataSet+"/inct/";
/**********************************************************************************/
// First time call Louvain
Network oldNetwork = Network.load(DyNet+"1");
double resolution2 = resolution_default / (2 * oldNetwork.totalEdgeWeight + oldNetwork.totalEdgeWeightSelfLinks);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
HashMap<String, Double> alpha2=new HashMap<String, Double>();
System.out.println("1 running");
double[] beta=null;
for (int i=0;i<nRandomStarts_default;i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(oldNetwork, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
System.out.println("1 done");
writeOutputFile("data/"+dataSet+"/runDynamicModularity_"+dataSet+"_com_1", clustering);
VOSClusteringTechnique VOSClusteringTechnique_temporary = new VOSClusteringTechnique(oldNetwork, clustering, resolution2);
double modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
if(modularity_temporary>maxModularity)
maxModularity=modularity_temporary;
alpha2=VOSClusteringTechnique_temporary.alpha2;
beta=VOSClusteringTechnique_temporary.beta;
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runDynamicModularity");
/**********************************************************************************/
// DynaMo
for(int ibatch=2;ibatch<=nbatch;ibatch++){
System.out.println(ibatch+" running");
/**********************************************************************************/
// DynaMo-Initialization
/**********************************************************************************/
// newly formed small clusters
Int2IntOpenHashMap clusterInitialization2=new Int2IntOpenHashMap();
int clusterInitializationCNT=0;
// the set of clusters that all their vertices will be initialized as singleton clusters
IntOpenHashSet clusterSet2=new IntOpenHashSet();
Int2IntOpenHashMap nodeAdded_adjacentNode2=new Int2IntOpenHashMap();
Int2DoubleOpenHashMap nodeAdded2_edgeWeight2=new Int2DoubleOpenHashMap();
Int2BooleanOpenHashMap nodeAdded2_flag2=new Int2BooleanOpenHashMap();
/**********************************************************************************/
BufferedReader bufferedReader = new BufferedReader(new FileReader(intNet+ibatch));
String line="";
long t1=System.currentTimeMillis();
while ((line=bufferedReader.readLine())!=null){
String[] lines=line.split("\t");
String FLAG=lines[1];
int startNode=Integer.parseInt(lines[2]);
int endNode=Integer.parseInt(lines[3]);
double wt_new=0.0;
if(FLAG.equals("+"))
wt_new=(lines.length > 4) ? Double.parseDouble(lines[4]) : 1;
else if(FLAG.equals("-"))
wt_new=(lines.length > 4) ? Double.parseDouble(lines[4]) : -1;
else
wt_new=Double.parseDouble(lines[5]);
double wt=wt_new;
// newly added vertices
if(startNode>=oldNetwork.nNodes){ // both startNode and endNode are new
if(!nodeAdded_adjacentNode2.containsKey(startNode)){
nodeAdded_adjacentNode2.put(startNode, endNode);
nodeAdded2_edgeWeight2.put(startNode, wt);
nodeAdded2_flag2.put(startNode, true);
}
else if(nodeAdded2_edgeWeight2.get(startNode)<wt){
nodeAdded_adjacentNode2.replace(startNode, endNode);
nodeAdded2_edgeWeight2.replace(startNode, wt);
nodeAdded2_flag2.replace(startNode, true);
}
else if(nodeAdded2_edgeWeight2.get(startNode)==wt)
nodeAdded2_flag2.replace(startNode, false);
if(!nodeAdded_adjacentNode2.containsKey(endNode)){
nodeAdded_adjacentNode2.put(endNode, startNode);
nodeAdded2_edgeWeight2.put(endNode, wt);
nodeAdded2_flag2.put(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)<wt){
nodeAdded_adjacentNode2.replace(endNode, startNode);
nodeAdded2_edgeWeight2.replace(endNode, wt);
nodeAdded2_flag2.replace(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)==wt)
nodeAdded2_flag2.replace(endNode, false);
}
else if(endNode>=oldNetwork.nNodes){ // only endNode is new
if(!nodeAdded_adjacentNode2.containsKey(endNode)){
nodeAdded_adjacentNode2.put(endNode, startNode);
nodeAdded2_edgeWeight2.put(endNode, wt);
nodeAdded2_flag2.put(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)<wt){
nodeAdded_adjacentNode2.replace(endNode, startNode);
nodeAdded2_edgeWeight2.replace(endNode, wt);
nodeAdded2_flag2.replace(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)==wt)
nodeAdded2_flag2.replace(endNode, false);
clusterSet2.add(clustering.getCluster(startNode));
}
// old vertices
else{ // both startNode and endNode are old
int cN1=clustering.getCluster(startNode);
int cN2=clustering.getCluster(endNode);
// edge addition or edge weight increase
if(wt>0.0){
if(cN1==cN2){ // intra-community
clusterSet2.add(cN1);
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
else{ // cross-community
double m=oldNetwork.totalEdgeWeight;
double acN1=0;
if(alpha2.containsKey(cN1+"_"+cN1)){
acN1=alpha2.get(cN1+"_"+cN1);
}
double acN2=0;
if(alpha2.containsKey(cN2+"_"+cN2)){
acN2=alpha2.get(cN2+"_"+cN2);
}
double bcN1=beta[cN1];
double bcN2=beta[cN2];
double acNk=0;
if(cN1<=cN2)
if(alpha2.containsKey(cN1+"_"+cN2))
acNk=acN1+acN2+2*alpha2.get(cN1+"_"+cN2);
else
acNk=acN1+acN2;
else
if(alpha2.containsKey(cN2+"_"+cN1))
acNk=acN1+acN2+2*alpha2.get(cN2+"_"+cN1);
else
acNk=acN1+acN2;
double alpha22=acN1+acN2-acNk;
double beta2=bcN1+bcN2;
double delta1=2*m-alpha22-beta2;
double delta2=m*alpha22+bcN1*bcN2;
double value=(Math.sqrt(Math.pow(delta1, 2)+4*delta2)-delta1)*0.5;
double delta_W=wt;
if(delta_W>value){
clusterSet2.add(cN1);
clusterSet2.add(cN2);
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
}
}
// edge deletion or edge weight decrease
else if(wt<0.0 && cN1==cN2){ // intra-community
clusterSet2.add(cN1);
for(int vt:Arrays.copyOfRange(oldNetwork.neighbor, oldNetwork.firstNeighborIndex[startNode], oldNetwork.firstNeighborIndex[startNode + 1]))
clusterSet2.add(clustering.getCluster(vt));
for(int vt:Arrays.copyOfRange(oldNetwork.neighbor, oldNetwork.firstNeighborIndex[endNode], oldNetwork.firstNeighborIndex[endNode + 1]))
clusterSet2.add(clustering.getCluster(vt));
}
}
}
bufferedReader.close();
long t2=System.currentTimeMillis();
/**********************************************************************************/
Network newNetwork=Network.load(DyNet+ibatch);
/**********************************************************************************/
for(Map.Entry<Integer, Integer> entry : nodeAdded_adjacentNode2.int2IntEntrySet()) {
int startNode=(Integer) entry.getKey();
int endNode=(Integer) entry.getValue();
if(nodeAdded2_flag2.get(startNode))
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
// vertices become singleton communities
IntOpenHashSet singletonNodeSet2=new IntOpenHashSet();
// from certain clusters
for(int k=0;k<oldNetwork.nNodes;k++)
if(!clusterInitialization2.containsKey(k) && clusterSet2.contains(clustering.getCluster(k)))
singletonNodeSet2.add(k);
// from newly added vertices
for(int node : nodeAdded_adjacentNode2.keySet())
if(!clusterInitialization2.containsKey(node))
singletonNodeSet2.add(node);
// Re-organize cluster labels
Int2IntOpenHashMap clusterMap2=new Int2IntOpenHashMap ();
// newly initialized set of clusters
Clustering clustering2=new Clustering(newNetwork.nNodes);
int cnt=0;
for(int k=0;k<newNetwork.nNodes;k++)
if(k<oldNetwork.nNodes && !clusterSet2.contains(clustering.cluster[k])){
if(clusterMap2.containsKey(clustering.cluster[k]))
clustering2.cluster[k]=clusterMap2.get(clustering.cluster[k]);
else{
clustering2.cluster[k]=cnt;
clusterMap2.put(clustering.cluster[k], cnt);
cnt++;
}
}
else if(singletonNodeSet2.contains(k)){
clustering2.cluster[k]=cnt;
cnt++;
}
for(Map.Entry<Integer, Integer> entry : clusterInitialization2.int2IntEntrySet())
clustering2.cluster[entry.getKey()]=cnt+entry.getValue();
clustering2.nClusters=Arrays2.calcMaximum(clustering2.cluster) + 1;
/**********************************************************************************/
// The DynaMo Algorithm
resolution2 = resolution_default / (2 * newNetwork.totalEdgeWeight + newNetwork.totalEdgeWeightSelfLinks);
alpha2=new HashMap<String, Double>();
beta=null;
clustering=null;
double maxModularity2 = Double.NEGATIVE_INFINITY;
random = new Random(randomSeed_default);
long t=0;
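			// Refine the partially re-initialized clustering with several random restarts of Louvain;
			// only the optimisation time (accumulated in t) is counted towards the incremental runtime.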
for (int i=0;i<nRandomStarts_default;i++){
VOSClusteringTechnique VOSClusteringTechnique2 = new VOSClusteringTechnique(newNetwork, clustering2, resolution2);
int j = 0;
boolean update = true;
long t3=System.currentTimeMillis();
do{
update = VOSClusteringTechnique2.runLouvainAlgorithm(random);
/*if (clustering2.nClusters < newNetwork.nNodes)
update = VOSClusteringTechnique2.runLouvainAlgorithm2(random);
else
update = VOSClusteringTechnique2.runLouvainAlgorithm(random);*/
j++;
}
while ((j < nIterations_default) && update);
long t4=System.currentTimeMillis();
double modularity = VOSClusteringTechnique2.calcQualityFunction();
if (modularity > maxModularity2){
// next old clustering
clustering = VOSClusteringTechnique2.getClustering();
maxModularity2 = modularity;
}
t+=t4-t3;
}
/**********************************************************************************/
writeOutputFile("data/"+dataSet+"/runDynamicModularity_"+dataSet+"_com_"+ibatch, clustering);
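			// Recompute the quality function once on the final clustering to refresh the cached
			// per-cluster statistics (alpha2: edge weight between cluster pairs, beta: node weight
			// per cluster) that are kept for the next increment.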
VOSClusteringTechnique_temporary = new VOSClusteringTechnique(newNetwork, clustering, resolution2);
modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
if(modularity_temporary>maxModularity)
maxModularity=modularity_temporary;
alpha2=VOSClusteringTechnique_temporary.alpha2;
beta=VOSClusteringTechnique_temporary.beta;
System.out.println(dataSet+"\t"+"runDynamicModularity"+"\t"+ibatch+"\t"+maxModularity2+"\t"+(t2-t1+t)+"\t"+(t2-t1)+"\t"+t);
pw.println(ibatch+"\t"+maxModularity2+"\t"+(t2-t1+t)+"\t"+(t2-t1)+"\t"+t);
// next old network
oldNetwork=new Network(newNetwork.nNodes, newNetwork.firstNeighborIndex, newNetwork.neighbor, newNetwork.edgeWeight);
}
pw.close();
}
private static void writeOutputFile(String fileName, Clustering clustering) throws IOException{
BufferedWriter bufferedWriter;
int i, nNodes;
nNodes = clustering.getNNodes();
clustering.orderClustersByNNodes();
bufferedWriter = new BufferedWriter(new FileWriter(fileName));
for (i = 0; i < nNodes; i++){
bufferedWriter.write(Integer.toString(clustering.getCluster(i)));
bufferedWriter.newLine();
}
bufferedWriter.close();
}
} | 17,094 | 45.078167 | 148 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/EdgeSort.java | package org.dzhuang.dynamic.DynaMo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* EdgeSort
*
* @author Di Zhuang
* @version 09/03/18
*/
public class EdgeSort {
public int node1;
public int node2;
public double edgeWeight;
public EdgeSort(int node1, int node2, double edgeWeight) {
this.node1 = node1;
this.node2 = node2;
this.edgeWeight = edgeWeight;
}
@Override
public String toString() {
return node1+","+node2+","+edgeWeight;
}
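	// Sorts edges lexicographically by (node1, node2); edgeWeight does not take part in the ordering.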
	public static void order(List<EdgeSort> edge) {
		Collections.sort(edge, new Comparator<EdgeSort>() {
			public int compare(EdgeSort o1, EdgeSort o2) {
				int sComp = Integer.compare(o1.node1, o2.node1);
				if (sComp != 0) {
					return sComp;
				} else {
					return Integer.compare(o1.node2, o2.node2);
				}
			}});
	}
public static void main(String[] args) throws IOException {
ArrayList<EdgeSort> people = new ArrayList<EdgeSort>();
people.add(new EdgeSort(1, 2, 1.0));
order(people);
System.out.println(people);
}
} | 1,349 | 21.881356 | 60 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/DyPerm.java | package org.dzhuang.dynamic.DynaMo;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
//TODO
public class DyPerm {
public static double resolution_default=1.0;
public static int nRandomStarts_default=10;
public static int nIterations_default=10000;
public static long randomSeed_default=0;
public static void main(String[] args) throws ClassNotFoundException, IOException {
// runDyPerm("Cit-HepPh", 31);
runDyPerm("Cit-HepTh", 25);
// runDyPerm("dblp_coauthorship", 31);
// runDyPerm("facebook", 28);
// runDyPerm("flickr", 24);
// runDyPerm("youtube", 33);
}
public static void runDyPerm(String dataSet, int nbatch) throws ClassNotFoundException, IOException {
String DyNet="data/"+dataSet+"/ntwk2/";
String intNet="data/"+dataSet+"/inct/";
/**********************************************************************************/
// First time call Louvain
Network oldNetwork = Network.load(DyNet+"1");
double resolution2 = resolution_default / (2 * oldNetwork.totalEdgeWeight + oldNetwork.totalEdgeWeightSelfLinks);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
HashMap<String, Double> alpha2=new HashMap<String, Double>();
System.out.println("1 running");
double[] beta=null;
for (int i=0;i<nRandomStarts_default;i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(oldNetwork, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
System.out.println("1 done");
writeOutputFile("data/"+dataSet+"/runDyPerm_"+dataSet+"_com_1", clustering);
VOSClusteringTechnique VOSClusteringTechnique_temporary = new VOSClusteringTechnique(oldNetwork, clustering, resolution2);
double modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
if(modularity_temporary>maxModularity)
maxModularity=modularity_temporary;
alpha2=VOSClusteringTechnique_temporary.alpha2;
beta=VOSClusteringTechnique_temporary.beta;
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runDyPerm");
/**********************************************************************************/
// DyPerm
for(int ibatch=2;ibatch<=nbatch;ibatch++){
long t1=System.currentTimeMillis();
System.out.println(ibatch+" running");
Network newNetwork=Network.load(DyNet+ibatch);
Clustering clustering2=new Clustering(newNetwork.nNodes);
HashMap<Integer, HashSet<Integer>> clustering2Set=new HashMap<Integer, HashSet<Integer>>();
int maxClustering=Integer.MIN_VALUE;
for(int k=0;k<newNetwork.nNodes;k++)
if(k<oldNetwork.nNodes){
int oldC=clustering.cluster[k];
clustering2.cluster[k]=oldC;
if(!clustering2Set.containsKey(oldC)) {
HashSet<Integer> tmpSet=new HashSet<Integer>();
tmpSet.add(k);
clustering2Set.put(oldC, tmpSet);
}
else {
clustering2Set.get(oldC).add(k);
}
if(oldC>maxClustering)
maxClustering=oldC;
}
maxClustering=maxClustering+1;
/**********************************************************************************/
// edges added
ArrayList<String> edges_added=new ArrayList<String>();
// edges removed
ArrayList<String> edges_removed=new ArrayList<String>();
/**********************************************************************************/
BufferedReader bufferedReader = new BufferedReader(new FileReader(intNet+ibatch));
String line="";
HashSet<String> newNodeSet=new HashSet<String>();
while ((line=bufferedReader.readLine())!=null){
String[] lines=line.split("\t");
String FLAG=lines[1];
if(FLAG.equals("+")) {
edges_added.add(lines[2]+"\t"+lines[3]);
if(Integer.parseInt(lines[2])>=oldNetwork.nNodes) {
if(!newNodeSet.contains(lines[2])) {
clustering2.cluster[Integer.parseInt(lines[2])]=maxClustering;
if(!clustering2Set.containsKey(maxClustering)) {
HashSet<Integer> tmpSet=new HashSet<Integer>();
tmpSet.add(Integer.parseInt(lines[2]));
clustering2Set.put(maxClustering, tmpSet);
}
else {
clustering2Set.get(maxClustering).add(Integer.parseInt(lines[2]));
}
maxClustering++;
newNodeSet.add(lines[2]);
}
if(!newNodeSet.contains(lines[3])) {
clustering2.cluster[Integer.parseInt(lines[3])]=maxClustering;
if(!clustering2Set.containsKey(maxClustering)) {
HashSet<Integer> tmpSet=new HashSet<Integer>();
tmpSet.add(Integer.parseInt(lines[3]));
clustering2Set.put(maxClustering, tmpSet);
}
else {
clustering2Set.get(maxClustering).add(Integer.parseInt(lines[3]));
}
maxClustering++;
newNodeSet.add(lines[3]);
}
}
else if(Integer.parseInt(lines[3])>=oldNetwork.nNodes && !newNodeSet.contains(lines[3])) {
clustering2.cluster[Integer.parseInt(lines[3])]=maxClustering;
if(!clustering2Set.containsKey(maxClustering)) {
HashSet<Integer> tmpSet=new HashSet<Integer>();
tmpSet.add(Integer.parseInt(lines[3]));
clustering2Set.put(maxClustering, tmpSet);
}
else {
clustering2Set.get(maxClustering).add(Integer.parseInt(lines[3]));
}
maxClustering++;
newNodeSet.add(lines[3]);
}
}
else if(FLAG.equals("-"))
edges_removed.add(lines[2]+"\t"+lines[3]);
}
bufferedReader.close();
clustering2.nClusters=Arrays2.calcMaximum(clustering2.cluster) + 1;
clustering=edge_addition(newNetwork,clustering2, clustering2Set, edges_added);
writeOutputFile("data/"+dataSet+"/runDyPerm_"+dataSet+"_com_"+ibatch, clustering);
VOSClusteringTechnique_temporary = new VOSClusteringTechnique(newNetwork, clustering, resolution2);
modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
long t2=System.currentTimeMillis();
System.out.println(dataSet+"\t"+"runDyPerm"+"\t"+ibatch+"\t"+modularity_temporary+"\t"+(t2-t1));
pw.println(ibatch+"\t"+modularity_temporary+"\t"+(t2-t1));
oldNetwork=new Network(newNetwork.nNodes, newNetwork.firstNeighborIndex, newNetwork.neighbor, newNetwork.edgeWeight);
}
pw.close();
}
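	/**
	 * DyPerm edge-addition step: for every added inter-community edge (u, v) the code below builds
	 * two candidate partitions, one that tries to migrate u's side into v's community and one that
	 * tries the reverse, expanding through intra-community neighbours whenever a move increases that
	 * vertex's permanence; the candidate with the greater change in community permanence is retained.
	 */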
public static Clustering edge_addition(Network newNetwork, Clustering clustering2, HashMap<Integer, HashSet<Integer>> clustering2Set, ArrayList<String> edges_added) {
for(String edg: edges_added) {
String[] lines=edg.split("\t");
int startNode=Integer.parseInt(lines[0]);
int endNode=Integer.parseInt(lines[1]);
int cN1=clustering2.getCluster(startNode);
int cN2=clustering2.getCluster(endNode);
if(cN1!=cN2) {
// check if the startNode moves
double c1_perm_old=perm_comm(startNode, cN1, newNetwork, clustering2, clustering2Set);
HashMap<Integer, HashSet<Integer>> temp_comm_list=new HashMap<Integer, HashSet<Integer>>(clustering2Set);
Clustering temp_comm_list_node_cluster=(Clustering) clustering2.clone();
ArrayList<Integer> queue=new ArrayList<Integer>();
queue.add(startNode);
HashMap<Integer, Integer> visited=new HashMap<Integer, Integer>();
visited.put(queue.get(0), 0);
while(queue.size()>0) {
int c=temp_comm_list_node_cluster.getCluster(queue.get(0));
ArrayList<Integer> evaluated=new ArrayList<Integer>();
for(Map.Entry<Integer, Integer> vis:visited.entrySet()) {
if(vis.getValue()==0 && vis.getKey()!=endNode) {
double p_1=permanence(vis.getKey(), c, newNetwork, temp_comm_list_node_cluster, temp_comm_list);
HashMap<Integer, HashSet<Integer>> temp_comm_list_new=new HashMap<Integer, HashSet<Integer>>(temp_comm_list);
Clustering temp_comm_list_new_node_cluster=(Clustering) temp_comm_list_node_cluster.clone();
temp_comm_list_new.get(c).remove(vis.getKey());
int c_v=temp_comm_list_new_node_cluster.getCluster(endNode);
temp_comm_list_new.get(c_v).add(vis.getKey());
temp_comm_list_new_node_cluster.cluster[vis.getKey()]=c_v;
double p_2=permanence(vis.getKey(), c_v, newNetwork, temp_comm_list_new_node_cluster, temp_comm_list_new);
visited.replace(vis.getKey(), 1);
if(p_2>p_1) {
temp_comm_list.get(c).remove(vis.getKey());
temp_comm_list.get(c_v).add(vis.getKey());
temp_comm_list_node_cluster.cluster[vis.getKey()]=c_v;
}
else
evaluated.add(vis.getKey());
}
}
ArrayList<Integer> que_add=new ArrayList<Integer>();
for(int q:queue) {
if(!evaluated.contains(q)) {
for (int k = newNetwork.firstNeighborIndex[q]; k < newNetwork.firstNeighborIndex[q + 1]; k++){
int n=newNetwork.neighbor[k];
if(temp_comm_list_node_cluster.cluster[n]!=c || visited.containsKey(n) || n==endNode) {
}
else {
que_add.add(n);
if(visited.containsKey(n)) {
visited.replace(n, 0);
}
else {
visited.put(n, 0);
}
}
}
}
}
queue=new ArrayList<Integer>(que_add);
}
double diff_c1=c1_perm_old-perm_comm(startNode, cN1, newNetwork, temp_comm_list_node_cluster, temp_comm_list);
// check if the endNode moves
double c2_perm_old=perm_comm(endNode, cN2, newNetwork, clustering2, clustering2Set);
HashMap<Integer, HashSet<Integer>> temp_comm_list2=new HashMap<Integer, HashSet<Integer>>(clustering2Set);
Clustering temp_comm_list_node_cluster2=(Clustering) clustering2.clone();
queue=new ArrayList<Integer>();
queue.add(endNode);
visited=new HashMap<Integer, Integer>();
visited.put(queue.get(0), 0);
while(queue.size()>0) {
int c=temp_comm_list_node_cluster2.getCluster(queue.get(0));
ArrayList<Integer> evaluated=new ArrayList<Integer>();
for(Map.Entry<Integer, Integer> vis:visited.entrySet()) {
if(vis.getValue()==0 && vis.getKey()!=startNode) {
double p_1=permanence(vis.getKey(), c, newNetwork, temp_comm_list_node_cluster2, temp_comm_list2);
HashMap<Integer, HashSet<Integer>> temp_comm_list_new=new HashMap<Integer, HashSet<Integer>>(temp_comm_list2);
Clustering temp_comm_list_new_node_cluster=(Clustering) temp_comm_list_node_cluster2.clone();
temp_comm_list_new.get(c).remove(vis.getKey());
int c_v=temp_comm_list_new_node_cluster.getCluster(startNode);
temp_comm_list_new.get(c_v).add(vis.getKey());
temp_comm_list_new_node_cluster.cluster[vis.getKey()]=c_v;
double p_2=permanence(vis.getKey(), c_v, newNetwork, temp_comm_list_new_node_cluster, temp_comm_list_new);
visited.replace(vis.getKey(), 1);
if(p_2>p_1) {
temp_comm_list2.get(c).remove(vis.getKey());
temp_comm_list2.get(c_v).add(vis.getKey());
temp_comm_list_node_cluster2.cluster[vis.getKey()]=c_v;
}
else
evaluated.add(vis.getKey());
}
}
ArrayList<Integer> que_add=new ArrayList<Integer>();
for(int q:queue) {
if(!evaluated.contains(q)) {
for (int k = newNetwork.firstNeighborIndex[q]; k < newNetwork.firstNeighborIndex[q + 1]; k++){
int n=newNetwork.neighbor[k];
if(temp_comm_list_node_cluster2.cluster[n]!=c || visited.containsKey(n) || n==startNode) {
}
else {
que_add.add(n);
if(visited.containsKey(n)) {
visited.replace(n, 0);
}
else {
visited.put(n, 0);
}
}
}
}
}
queue=new ArrayList<Integer>(que_add);
}
double diff_c2=c2_perm_old-perm_comm(endNode, cN2, newNetwork, temp_comm_list_node_cluster2, temp_comm_list2);
// retain community structure having greater difference
if(diff_c1>diff_c2) {
clustering2Set=new HashMap<Integer, HashSet<Integer>>(temp_comm_list);
				clustering2=(Clustering) temp_comm_list_node_cluster.clone();
}
else {
clustering2Set=new HashMap<Integer, HashSet<Integer>>(temp_comm_list2);
				clustering2=(Clustering) temp_comm_list_node_cluster2.clone();
}
}
}
return clustering2;
}
public static double perm_comm(int node, int c_node, Network newNetwork, Clustering clustering2, HashMap<Integer, HashSet<Integer>> clustering2Set) {
double perm=0;
HashSet<Integer> c_node_set=clustering2Set.get(c_node);
if(c_node_set.size()>0) {
			// average the permanence over every member of the community
			for(int i: c_node_set) {
				perm+=permanence(i, c_node, newNetwork, clustering2, clustering2Set);
}
perm=perm/(double)c_node_set.size();
}
return perm;
}
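	// Permanence of a vertex v with respect to community c, as implemented below:
	//   perm(v) = ( I(v) / d(v) ) * ( 1 / E_max(v) ) - 1 + C_in(v)
	// where I(v) is the number of neighbours of v inside c, d(v) its degree, E_max(v) the largest
	// number of links v has to any single external community, and C_in(v) measures how densely v's
	// internal neighbours are connected to each other.  When E_max(v) = 0 the value falls back to
	// I(v)/d(v), or 0 for an isolated vertex.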
public static double permanence(int node, int c_node, Network newNetwork, Clustering clustering2, HashMap<Integer, HashSet<Integer>> clustering2Set) {
HashSet<Integer> i_neigh=new HashSet<Integer>();
HashSet<Integer> c_node_set=clustering2Set.get(c_node);
int internal_neighbors=0;
int d_u=0;
int e_max=0;
double perm=0;
HashMap<Integer, Integer> comm_neighbors=new HashMap<Integer, Integer>();
for (int k = newNetwork.firstNeighborIndex[node]; k < newNetwork.firstNeighborIndex[node + 1]; k++){
d_u++;
int c_neighbor=clustering2.cluster[newNetwork.neighbor[k]];
if(c_neighbor==c_node) {
internal_neighbors++;
				i_neigh.add(newNetwork.neighbor[k]);	// record the internal neighbour node
}
else {
if(comm_neighbors.containsKey(c_neighbor)) {
comm_neighbors.replace(c_neighbor, comm_neighbors.get(c_neighbor)+1);
}
else {
comm_neighbors.put(c_neighbor, 1);
}
if(comm_neighbors.get(c_neighbor)>e_max)
e_max=comm_neighbors.get(c_neighbor);
}
}
int numerator=0;
if(e_max==0 && d_u!=0) {
perm=(double)internal_neighbors/(double)d_u;
}
else if(e_max==0 && d_u==0) {
perm=0;
}
else {
for(int i:c_node_set) {
for(int j:c_node_set) {
if(i<j && i!=node && j!=node && i_neigh.contains(i) && i_neigh.contains(j) && network_has_edge(newNetwork, i, j)) {
numerator+=2;
}
}
}
double denominator = (double)internal_neighbors * (double)(internal_neighbors - 1) / 2.0;
if(denominator==0)
denominator=1;
double c_in = (double)numerator / denominator;
perm = ((double)internal_neighbors / (double)d_u) * (1 / (double)e_max) - 1 + c_in;
}
return perm;
}
public static boolean network_has_edge(Network network, int node_1, int node_2) {
if(node_1<node_2) {
for (int k = network.firstNeighborIndex[node_1]; k < network.firstNeighborIndex[node_1 + 1]; k++){
if(network.neighbor[k]==node_2)
return true;
}
}
else {
for (int k = network.firstNeighborIndex[node_2]; k < network.firstNeighborIndex[node_2 + 1]; k++){
if(network.neighbor[k]==node_1)
return true;
}
}
return false;
}
private static void writeOutputFile(String fileName, Clustering clustering) throws IOException{
BufferedWriter bufferedWriter;
int i, nNodes;
nNodes = clustering.getNNodes();
clustering.orderClustersByNNodes();
bufferedWriter = new BufferedWriter(new FileWriter(fileName));
for (i = 0; i < nNodes; i++){
bufferedWriter.write(Integer.toString(clustering.getCluster(i)));
bufferedWriter.newLine();
}
bufferedWriter.close();
}
}
| 17,139 | 39.234742 | 169 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/Arrays2.java | package org.dzhuang.dynamic.DynaMo;
/**
* Arrays2
*
* @author Ludo Waltman
* @author Nees Jan van Eck
* @version 1.3.1, 11/17/14
*/
import java.util.Arrays;
import java.util.Random;
public class Arrays2{
public static double calcSum(double[] value){
double sum;
int i;
sum = 0;
for (i = 0; i < value.length; i++)
sum += value[i];
return sum;
}
public static double calcSum(double[] value, int beginIndex, int endIndex){
double sum;
int i;
sum = 0;
for (i = beginIndex; i < endIndex; i++)
sum += value[i];
return sum;
}
public static double calcAverage(double[] value){
double average;
int i;
average = 0;
for (i = 0; i < value.length; i++)
average += value[i];
average /= value.length;
return average;
}
public static double calcMedian(double[] value){
double median;
double[] sortedValue;
sortedValue = (double[])value.clone();
Arrays.sort(sortedValue);
if (sortedValue.length % 2 == 1)
median = sortedValue[(sortedValue.length - 1) / 2];
else
median = (sortedValue[sortedValue.length / 2 - 1] + sortedValue[sortedValue.length / 2]) / 2;
return median;
}
public static double calcMinimum(double[] value){
double minimum;
int i;
minimum = value[0];
for (i = 1; i < value.length; i++)
minimum = Math.min(minimum, value[i]);
return minimum;
}
public static double calcMaximum(double[] value){
double maximum;
int i;
maximum = value[0];
for (i = 1; i < value.length; i++)
maximum = Math.max(maximum, value[i]);
return maximum;
}
public static int calcMaximum(int[] value){
int i, maximum;
maximum = value[0];
for (i = 1; i < value.length; i++)
maximum = Math.max(maximum, value[i]);
return maximum;
}
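	// Returns a shuffled permutation of 0..nElements-1 (each position is swapped with a randomly
	// chosen position); used to randomise the order in which nodes are visited.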
public static int[] generateRandomPermutation(int nElements){
return generateRandomPermutation(nElements, new Random());
}
public static int[] generateRandomPermutation(int nElements, Random random){
int i, j, k;
int[] permutation;
permutation = new int[nElements];
for (i = 0; i < nElements; i++)
permutation[i] = i;
for (i = 0; i < nElements; i++){
j = random.nextInt(nElements);
k = permutation[i];
permutation[i] = permutation[j];
permutation[j] = k;
}
return permutation;
}
}
| 2,692 | 23.935185 | 105 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/Network.java | package org.dzhuang.dynamic.DynaMo;
/**
* Network
*
* @author Ludo Waltman
* @author Nees Jan van Eck
* @version 1.3.1, 08/30/15
*/
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Random;
public class Network implements Serializable{
private static final long serialVersionUID = 1;
public double totalEdgeWeight;
public int nNodes;
protected int nEdges;
protected double[] nodeWeight;
public int[] firstNeighborIndex;
public int[] neighbor;
public double[] edgeWeight;
public double totalEdgeWeightSelfLinks;
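	// Adjacency is stored in compressed sparse row (CSR) form: the neighbours of node i occupy
	// neighbor[firstNeighborIndex[i] .. firstNeighborIndex[i + 1] - 1], with the matching weights in
	// edgeWeight.  A typical scan therefore looks like (illustrative only):
	//   for (int k = firstNeighborIndex[i]; k < firstNeighborIndex[i + 1]; k++)
	//       process(neighbor[k], edgeWeight[k]);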
public static Network load(String fileName) throws ClassNotFoundException, IOException{
Network network;
ObjectInputStream objectInputStream;
objectInputStream = new ObjectInputStream(new FileInputStream(fileName));
network = (Network)objectInputStream.readObject();
objectInputStream.close();
return network;
}
public Network(int nNodes, int[][] edge){
this(nNodes, null, edge, null);
}
public Network(int nNodes, double[] nodeWeight, int[][] edge){
this(nNodes, nodeWeight, edge, null);
}
public Network(int nNodes, int[][] edge, double[] edgeWeight){
this(nNodes, null, edge, edgeWeight);
}
public Network(int nNodes, double[] nodeWeight, int[][] edge, double[] edgeWeight){
double[] edgeWeight2;
int i, j;
int[] neighbor;
this.nNodes = nNodes;
nEdges = 0;
firstNeighborIndex = new int[nNodes + 1];
neighbor = new int[edge[0].length];
edgeWeight2 = new double[edge[0].length];
totalEdgeWeightSelfLinks = 0;
i = 1;
for (j = 0; j < edge[0].length; j++)
if (edge[0][j] != edge[1][j]){
if (edge[0][j] >= i)
for (; i <= edge[0][j]; i++)
firstNeighborIndex[i] = nEdges;
neighbor[nEdges] = edge[1][j];
edgeWeight2[nEdges] = (edgeWeight != null) ? edgeWeight[j] : 1;
nEdges++;
}
else
totalEdgeWeightSelfLinks += (edgeWeight != null) ? edgeWeight[j] : 1;
for (; i <= nNodes; i++)
firstNeighborIndex[i] = nEdges;
this.neighbor = Arrays.copyOfRange(neighbor, 0, nEdges);
this.edgeWeight = Arrays.copyOfRange(edgeWeight2, 0, nEdges);
this.nodeWeight = (nodeWeight != null) ? (double[])nodeWeight.clone() : getTotalEdgeWeightPerNode();
}
public Network(int nNodes, int[] firstNeighborIndex, int[] neighbor){
this(nNodes, null, firstNeighborIndex, neighbor, null);
}
public Network(int nNodes, double[] nodeWeight, int[] firstNeighborIndex, int[] neighbor){
this(nNodes, nodeWeight, firstNeighborIndex, neighbor, null);
}
public Network(int nNodes, int[] firstNeighborIndex, int[] neighbor, double[] edgeWeight){
this(nNodes, null, firstNeighborIndex, neighbor, edgeWeight);
}
public Network(int nNodes, double[] nodeWeight, int[] firstNeighborIndex, int[] neighbor, double[] edgeWeight){
this.nNodes = nNodes;
nEdges = neighbor.length;
this.firstNeighborIndex = (int[])firstNeighborIndex.clone();
this.neighbor = (int[])neighbor.clone();
if (edgeWeight != null) {
this.edgeWeight = (double[])edgeWeight.clone();
this.totalEdgeWeight=Arrays2.calcSum(edgeWeight) / 2;
}
else{
this.edgeWeight = new double[nEdges];
edgeWeight=new double[nEdges];
Arrays.fill(this.edgeWeight, 1);
Arrays.fill(edgeWeight, 1);
this.totalEdgeWeight=Arrays2.calcSum(edgeWeight) / 2;
}
totalEdgeWeightSelfLinks = 0;
this.nodeWeight = (nodeWeight != null) ? (double[])nodeWeight.clone() : getTotalEdgeWeightPerNode();
}
public void save(String fileName) throws IOException{
ObjectOutputStream objectOutputStream;
objectOutputStream = new ObjectOutputStream(new FileOutputStream(fileName));
objectOutputStream.writeObject(this);
objectOutputStream.close();
}
public int getNNodes(){
return nNodes;
}
public double getTotalNodeWeight(){
return Arrays2.calcSum(nodeWeight);
}
public double[] getNodeWeights(){
return (double[])nodeWeight.clone();
}
public double getNodeWeight(int node){
return nodeWeight[node];
}
public int getNEdges(){
return nEdges / 2;
}
public int getNEdges(int node){
return firstNeighborIndex[node + 1] - firstNeighborIndex[node];
}
public int[] getNEdgesPerNode(){
int i;
int[] nEdgesPerNode;
nEdgesPerNode = new int[nNodes];
for (i = 0; i < nNodes; i++)
nEdgesPerNode[i] = firstNeighborIndex[i + 1] - firstNeighborIndex[i];
return nEdgesPerNode;
}
public int[][] getEdges(){
int i;
int[][] edge;
edge = new int[2][];
edge[0] = new int[nEdges];
for (i = 0; i < nNodes; i++)
Arrays.fill(edge[0], firstNeighborIndex[i], firstNeighborIndex[i + 1], i);
edge[1] = (int[])neighbor.clone();
return edge;
}
public int[] getEdges(int node){
return Arrays.copyOfRange(neighbor, firstNeighborIndex[node], firstNeighborIndex[node + 1]);
}
public int[][] getEdgesPerNode(){
int i;
int[][] edgePerNode;
edgePerNode = new int[nNodes][];
for (i = 0; i < nNodes; i++)
edgePerNode[i] = Arrays.copyOfRange(neighbor, firstNeighborIndex[i], firstNeighborIndex[i + 1]);
return edgePerNode;
}
public double getTotalEdgeWeight(){
return Arrays2.calcSum(edgeWeight) / 2;
}
public double getTotalEdgeWeight(int node){
return Arrays2.calcSum(edgeWeight, firstNeighborIndex[node], firstNeighborIndex[node + 1]);
}
public double[] getTotalEdgeWeightPerNode(){
double[] totalEdgeWeightPerNode;
int i;
totalEdgeWeightPerNode = new double[nNodes];
for (i = 0; i < nNodes; i++)
totalEdgeWeightPerNode[i] = Arrays2.calcSum(edgeWeight, firstNeighborIndex[i], firstNeighborIndex[i + 1]);
return totalEdgeWeightPerNode;
}
public double[] getEdgeWeights(){
return (double[])edgeWeight.clone();
}
public double[] getEdgeWeights(int node){
return Arrays.copyOfRange(edgeWeight, firstNeighborIndex[node], firstNeighborIndex[node + 1]);
}
public double[][] getEdgeWeightsPerNode(){
double[][] edgeWeightPerNode;
int i;
edgeWeightPerNode = new double[nNodes][];
for (i = 0; i < nNodes; i++)
edgeWeightPerNode[i] = Arrays.copyOfRange(edgeWeight, firstNeighborIndex[i], firstNeighborIndex[i + 1]);
return edgeWeightPerNode;
}
public double getTotalEdgeWeightSelfLinks(){
return totalEdgeWeightSelfLinks;
}
public Network createNetworkWithoutNodeWeights(){
Network networkWithoutNodeWeights;
networkWithoutNodeWeights = new Network();
networkWithoutNodeWeights.nNodes = nNodes;
networkWithoutNodeWeights.nEdges = nEdges;
networkWithoutNodeWeights.nodeWeight = new double[nNodes];
Arrays.fill(networkWithoutNodeWeights.nodeWeight, 1);
networkWithoutNodeWeights.firstNeighborIndex = firstNeighborIndex;
networkWithoutNodeWeights.neighbor = neighbor;
networkWithoutNodeWeights.edgeWeight = edgeWeight;
networkWithoutNodeWeights.totalEdgeWeightSelfLinks = totalEdgeWeightSelfLinks;
return networkWithoutNodeWeights;
}
public Network createNetworkWithoutEdgeWeights(){
Network networkWithoutEdgeWeights;
networkWithoutEdgeWeights = new Network();
networkWithoutEdgeWeights.nNodes = nNodes;
networkWithoutEdgeWeights.nEdges = nEdges;
networkWithoutEdgeWeights.nodeWeight = nodeWeight;
networkWithoutEdgeWeights.firstNeighborIndex = firstNeighborIndex;
networkWithoutEdgeWeights.neighbor = neighbor;
networkWithoutEdgeWeights.edgeWeight = new double[nEdges];
Arrays.fill(networkWithoutEdgeWeights.edgeWeight, 1);
networkWithoutEdgeWeights.totalEdgeWeightSelfLinks = 0;
return networkWithoutEdgeWeights;
}
public Network createNetworkWithoutNodeAndEdgeWeights(){
Network networkWithoutNodeAndEdgeWeights;
networkWithoutNodeAndEdgeWeights = new Network();
networkWithoutNodeAndEdgeWeights.nNodes = nNodes;
networkWithoutNodeAndEdgeWeights.nEdges = nEdges;
networkWithoutNodeAndEdgeWeights.nodeWeight = new double[nNodes];
Arrays.fill(networkWithoutNodeAndEdgeWeights.nodeWeight, 1);
networkWithoutNodeAndEdgeWeights.firstNeighborIndex = firstNeighborIndex;
networkWithoutNodeAndEdgeWeights.neighbor = neighbor;
networkWithoutNodeAndEdgeWeights.edgeWeight = new double[nEdges];
Arrays.fill(networkWithoutNodeAndEdgeWeights.edgeWeight, 1);
networkWithoutNodeAndEdgeWeights.totalEdgeWeightSelfLinks = 0;
return networkWithoutNodeAndEdgeWeights;
}
public Network createNormalizedNetwork1(){
double totalNodeWeight;
int i, j;
Network normalizedNetwork;
normalizedNetwork = new Network();
normalizedNetwork.nNodes = nNodes;
normalizedNetwork.nEdges = nEdges;
normalizedNetwork.nodeWeight = new double[nNodes];
Arrays.fill(normalizedNetwork.nodeWeight, 1);
normalizedNetwork.firstNeighborIndex = firstNeighborIndex;
normalizedNetwork.neighbor = neighbor;
normalizedNetwork.edgeWeight = new double[nEdges];
totalNodeWeight = getTotalNodeWeight();
for (i = 0; i < nNodes; i++)
for (j = firstNeighborIndex[i]; j < firstNeighborIndex[i + 1]; j++)
normalizedNetwork.edgeWeight[j] = edgeWeight[j] / ((nodeWeight[i] * nodeWeight[neighbor[j]]) / totalNodeWeight);
normalizedNetwork.totalEdgeWeightSelfLinks = 0;
return normalizedNetwork;
}
public Network createNormalizedNetwork2(){
int i, j;
Network normalizedNetwork;
normalizedNetwork = new Network();
normalizedNetwork.nNodes = nNodes;
normalizedNetwork.nEdges = nEdges;
normalizedNetwork.nodeWeight = new double[nNodes];
Arrays.fill(normalizedNetwork.nodeWeight, 1);
normalizedNetwork.firstNeighborIndex = firstNeighborIndex;
normalizedNetwork.neighbor = neighbor;
normalizedNetwork.edgeWeight = new double[nEdges];
for (i = 0; i < nNodes; i++)
for (j = firstNeighborIndex[i]; j < firstNeighborIndex[i + 1]; j++)
normalizedNetwork.edgeWeight[j] = edgeWeight[j] / (2 / (nNodes / nodeWeight[i] + nNodes / nodeWeight[neighbor[j]]));
normalizedNetwork.totalEdgeWeightSelfLinks = 0;
return normalizedNetwork;
}
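	// Keeps only the nEdges highest-weight edges.  Edges whose weight equals the cut-off are kept or
	// dropped according to a deterministic pseudo-random number derived from a node permutation, so
	// both stored directions of an undirected edge receive the same decision.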
public Network createPrunedNetwork(int nEdges){
return createPrunedNetwork(nEdges, new Random());
}
public Network createPrunedNetwork(int nEdges, Random random){
double edgeWeightThreshold, randomNumberThreshold;
double[] edgeWeight, randomNumber;
int i, j, k, nEdgesAboveThreshold, nEdgesAtThreshold;
int[] nodePermutation;
Network prunedNetwork;
nEdges *= 2;
if (nEdges >= this.nEdges)
return this;
edgeWeight = new double[this.nEdges / 2];
i = 0;
for (j = 0; j < nNodes; j++)
for (k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++)
if (neighbor[k] < j){
edgeWeight[i] = this.edgeWeight[k];
i++;
}
Arrays.sort(edgeWeight);
edgeWeightThreshold = edgeWeight[(this.nEdges - nEdges) / 2];
nEdgesAboveThreshold = 0;
while (edgeWeight[this.nEdges / 2 - nEdgesAboveThreshold - 1] > edgeWeightThreshold)
nEdgesAboveThreshold++;
nEdgesAtThreshold = 0;
while ((nEdgesAboveThreshold + nEdgesAtThreshold < this.nEdges / 2) && (edgeWeight[this.nEdges / 2 - nEdgesAboveThreshold - nEdgesAtThreshold - 1] == edgeWeightThreshold))
nEdgesAtThreshold++;
nodePermutation = Arrays2.generateRandomPermutation(nNodes, random);
randomNumber = new double[nEdgesAtThreshold];
i = 0;
for (j = 0; j < nNodes; j++)
for (k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++)
if ((neighbor[k] < j) && (this.edgeWeight[k] == edgeWeightThreshold)){
randomNumber[i] = generateRandomNumber(j, neighbor[k], nodePermutation);
i++;
}
Arrays.sort(randomNumber);
randomNumberThreshold = randomNumber[nEdgesAboveThreshold + nEdgesAtThreshold - nEdges / 2];
prunedNetwork = new Network();
prunedNetwork.nNodes = nNodes;
prunedNetwork.nEdges = nEdges;
prunedNetwork.nodeWeight = nodeWeight;
prunedNetwork.firstNeighborIndex = new int[nNodes + 1];
prunedNetwork.neighbor = new int[nEdges];
prunedNetwork.edgeWeight = new double[nEdges];
i = 0;
for (j = 0; j < nNodes; j++){
for (k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++)
if ((this.edgeWeight[k] > edgeWeightThreshold) || ((this.edgeWeight[k] == edgeWeightThreshold) && (generateRandomNumber(j, neighbor[k], nodePermutation) >= randomNumberThreshold))){
prunedNetwork.neighbor[i] = neighbor[k];
prunedNetwork.edgeWeight[i] = this.edgeWeight[k];
i++;
}
prunedNetwork.firstNeighborIndex[j + 1] = i;
}
prunedNetwork.totalEdgeWeightSelfLinks = totalEdgeWeightSelfLinks;
return prunedNetwork;
}
public Network createSubnetwork(int[] node){
double[] subnetworkEdgeWeight;
int i, j, k;
int[] subnetworkNode, subnetworkNeighbor;
Network subnetwork;
subnetwork = new Network();
subnetwork.nNodes = node.length;
if (subnetwork.nNodes == 1){
subnetwork.nEdges = 0;
subnetwork.nodeWeight = new double[] {nodeWeight[node[0]]};
subnetwork.firstNeighborIndex = new int[2];
subnetwork.neighbor = new int[0];
subnetwork.edgeWeight = new double[0];
}
else{
subnetworkNode = new int[nNodes];
Arrays.fill(subnetworkNode, -1);
for (i = 0; i < node.length; i++)
subnetworkNode[node[i]] = i;
subnetwork.nEdges = 0;
subnetwork.nodeWeight = new double[subnetwork.nNodes];
subnetwork.firstNeighborIndex = new int[subnetwork.nNodes + 1];
subnetworkNeighbor = new int[nEdges];
subnetworkEdgeWeight = new double[nEdges];
for (i = 0; i < subnetwork.nNodes; i++){
j = node[i];
subnetwork.nodeWeight[i] = nodeWeight[j];
for (k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++)
if (subnetworkNode[neighbor[k]] >= 0){
subnetworkNeighbor[subnetwork.nEdges] = subnetworkNode[neighbor[k]];
subnetworkEdgeWeight[subnetwork.nEdges] = edgeWeight[k];
subnetwork.nEdges++;
}
subnetwork.firstNeighborIndex[i + 1] = subnetwork.nEdges;
}
subnetwork.neighbor = Arrays.copyOfRange(subnetworkNeighbor, 0, subnetwork.nEdges);
subnetwork.edgeWeight = Arrays.copyOfRange(subnetworkEdgeWeight, 0, subnetwork.nEdges);
}
subnetwork.totalEdgeWeightSelfLinks = 0;
return subnetwork;
}
public Network createSubnetwork(boolean[] nodeInSubnetwork){
int i, j;
int[] node;
i = 0;
for (j = 0; j < nNodes; j++)
if (nodeInSubnetwork[j])
i++;
node = new int[i];
i = 0;
for (j = 0; j < nNodes; j++)
if (nodeInSubnetwork[j]){
node[i] = j;
i++;
}
return createSubnetwork(node);
}
public Network createSubnetwork(Clustering clustering, int cluster){
double[] subnetworkEdgeWeight;
int[] subnetworkNeighbor, subnetworkNode;
int[][] nodePerCluster;
Network subnetwork;
nodePerCluster = clustering.getNodesPerCluster();
subnetworkNode = new int[nNodes];
subnetworkNeighbor = new int[nEdges];
subnetworkEdgeWeight = new double[nEdges];
subnetwork = createSubnetwork(clustering, cluster, nodePerCluster[cluster], subnetworkNode, subnetworkNeighbor, subnetworkEdgeWeight);
return subnetwork;
}
public Network[] createSubnetworks(Clustering clustering){
double[] subnetworkEdgeWeight;
int i;
int[] subnetworkNeighbor, subnetworkNode;
int[][] nodePerCluster;
Network[] subnetwork;
subnetwork = new Network[clustering.nClusters];
nodePerCluster = clustering.getNodesPerCluster();
subnetworkNode = new int[nNodes];
subnetworkNeighbor = new int[nEdges];
subnetworkEdgeWeight = new double[nEdges];
for (i = 0; i < clustering.nClusters; i++)
subnetwork[i] = createSubnetwork(clustering, i, nodePerCluster[i], subnetworkNode, subnetworkNeighbor, subnetworkEdgeWeight);
return subnetwork;
}
public Network createSubnetworkLargestComponent()
{
return createSubnetwork(identifyComponents(), 0);
}
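	// Aggregation step used by the Louvain algorithm: every cluster of the given clustering becomes
	// a single node, node weights are summed, inter-cluster edge weights are accumulated, and
	// intra-cluster edge weight is folded into totalEdgeWeightSelfLinks.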
public Network createReducedNetwork(Clustering clustering){
double[] reducedNetworkEdgeWeight1, reducedNetworkEdgeWeight2;
int i, j, k, l, m, n;
int[] reducedNetworkNeighbor1, reducedNetworkNeighbor2;
int[][] nodePerCluster;
Network reducedNetwork;
reducedNetwork = new Network();
reducedNetwork.nNodes = clustering.nClusters;
reducedNetwork.nEdges = 0;
reducedNetwork.nodeWeight = new double[clustering.nClusters];
reducedNetwork.firstNeighborIndex = new int[clustering.nClusters + 1];
reducedNetwork.totalEdgeWeightSelfLinks = totalEdgeWeightSelfLinks;
reducedNetworkNeighbor1 = new int[nEdges];
reducedNetworkEdgeWeight1 = new double[nEdges];
reducedNetworkNeighbor2 = new int[clustering.nClusters - 1];
reducedNetworkEdgeWeight2 = new double[clustering.nClusters];
nodePerCluster = clustering.getNodesPerCluster();
for (i = 0; i < clustering.nClusters; i++){
j = 0;
for (k = 0; k < nodePerCluster[i].length; k++){
l = nodePerCluster[i][k];
reducedNetwork.nodeWeight[i] += nodeWeight[l];
for (m = firstNeighborIndex[l]; m < firstNeighborIndex[l + 1]; m++){
n = clustering.cluster[neighbor[m]];
if (n != i){
if (reducedNetworkEdgeWeight2[n] == 0){
reducedNetworkNeighbor2[j] = n;
j++;
}
reducedNetworkEdgeWeight2[n] += edgeWeight[m];
}
else
reducedNetwork.totalEdgeWeightSelfLinks += edgeWeight[m];
}
}
for (k = 0; k < j; k++){
reducedNetworkNeighbor1[reducedNetwork.nEdges + k] = reducedNetworkNeighbor2[k];
reducedNetworkEdgeWeight1[reducedNetwork.nEdges + k] = reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]];
reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]] = 0;
}
reducedNetwork.nEdges += j;
reducedNetwork.firstNeighborIndex[i + 1] = reducedNetwork.nEdges;
}
reducedNetwork.neighbor = Arrays.copyOfRange(reducedNetworkNeighbor1, 0, reducedNetwork.nEdges);
reducedNetwork.edgeWeight = Arrays.copyOfRange(reducedNetworkEdgeWeight1, 0, reducedNetwork.nEdges);
return reducedNetwork;
}
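	// Labels connected components with a breadth-first search and returns them as a Clustering,
	// ordered so that cluster 0 is the largest component.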
public Clustering identifyComponents(){
boolean[] nodeVisited;
Clustering clustering;
int i, j, k, l;
int[] node;
clustering = new Clustering(nNodes);
clustering.nClusters = 0;
nodeVisited = new boolean[nNodes];
node = new int[nNodes];
for (i = 0; i < nNodes; i++)
if (!nodeVisited[i]){
clustering.cluster[i] = clustering.nClusters;
nodeVisited[i] = true;
node[0] = i;
j = 1;
k = 0;
do{
for (l = firstNeighborIndex[node[k]]; l < firstNeighborIndex[node[k] + 1]; l++)
if (!nodeVisited[neighbor[l]]){
clustering.cluster[neighbor[l]] = clustering.nClusters;
nodeVisited[neighbor[l]] = true;
node[j] = neighbor[l];
j++;
}
k++;
}
while (k < j);
clustering.nClusters++;
}
clustering.orderClustersByNNodes();
return clustering;
}
private Network(){
}
private double generateRandomNumber(int node1, int node2, int[] nodePermutation){
int i, j;
Random random;
if (node1 < node2){
i = node1;
j = node2;
}
else{
i = node2;
j = node1;
}
random = new Random(nodePermutation[i] * nNodes + nodePermutation[j]);
return random.nextDouble();
}
private Network createSubnetwork(Clustering clustering, int cluster, int[] node, int[] subnetworkNode, int[] subnetworkNeighbor, double[] subnetworkEdgeWeight){
int i, j, k;
Network subnetwork;
subnetwork = new Network();
subnetwork.nNodes = node.length;
if (subnetwork.nNodes == 1){
subnetwork.nEdges = 0;
subnetwork.nodeWeight = new double[] {nodeWeight[node[0]]};
subnetwork.firstNeighborIndex = new int[2];
subnetwork.neighbor = new int[0];
subnetwork.edgeWeight = new double[0];
}
else{
for (i = 0; i < node.length; i++)
subnetworkNode[node[i]] = i;
subnetwork.nEdges = 0;
subnetwork.nodeWeight = new double[subnetwork.nNodes];
subnetwork.firstNeighborIndex = new int[subnetwork.nNodes + 1];
for (i = 0; i < subnetwork.nNodes; i++){
j = node[i];
subnetwork.nodeWeight[i] = nodeWeight[j];
for (k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++)
if (clustering.cluster[neighbor[k]] == cluster){
subnetworkNeighbor[subnetwork.nEdges] = subnetworkNode[neighbor[k]];
subnetworkEdgeWeight[subnetwork.nEdges] = edgeWeight[k];
subnetwork.nEdges++;
}
subnetwork.firstNeighborIndex[i + 1] = subnetwork.nEdges;
}
subnetwork.neighbor = Arrays.copyOfRange(subnetworkNeighbor, 0, subnetwork.nEdges);
subnetwork.edgeWeight = Arrays.copyOfRange(subnetworkEdgeWeight, 0, subnetwork.nEdges);
}
subnetwork.totalEdgeWeightSelfLinks = 0;
return subnetwork;
}
}
| 23,982 | 35.671254 | 197 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/Clustering.java | package org.dzhuang.dynamic.DynaMo;
/**
* Clustering
*
* @author Ludo Waltman
* @author Nees Jan van Eck
 * @version 1.3.1, 11/17/14
*/
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
public class Clustering implements Cloneable, Serializable{
private static final long serialVersionUID = 1;
protected int nNodes;
public int nClusters;
public int[] cluster;
public static Clustering load(String fileName) throws ClassNotFoundException, IOException{
Clustering clustering;
ObjectInputStream objectInputStream;
objectInputStream = new ObjectInputStream(new FileInputStream(fileName));
clustering = (Clustering)objectInputStream.readObject();
objectInputStream.close();
return clustering;
}
public Clustering(int nNodes){
this.nNodes = nNodes;
cluster = new int[nNodes];
nClusters = 1;
}
public Clustering(int[] cluster){
nNodes = cluster.length;
this.cluster = (int[])cluster.clone();
nClusters = Arrays2.calcMaximum(cluster) + 1;
}
public Object clone(){
Clustering clonedClustering;
try{
clonedClustering = (Clustering)super.clone();
clonedClustering.cluster = getClusters();
clonedClustering.nNodes=this.nNodes;
clonedClustering.nClusters=this.nClusters;
return clonedClustering;
}
catch (CloneNotSupportedException e){
return null;
}
}
public void save(String fileName) throws IOException{
ObjectOutputStream objectOutputStream;
objectOutputStream = new ObjectOutputStream(new FileOutputStream(fileName));
objectOutputStream.writeObject(this);
objectOutputStream.close();
}
public int getNNodes(){
return nNodes;
}
public int getNClusters(){
return nClusters;
}
public int[] getClusters(){
return (int[])cluster.clone();
}
public int getCluster(int node){
return cluster[node];
}
public int[] getNNodesPerCluster(){
int i;
int[] nNodesPerCluster;
nNodesPerCluster = new int[nClusters];
for (i = 0; i < nNodes; i++)
nNodesPerCluster[cluster[i]]++;
return nNodesPerCluster;
}
public int[][] getNodesPerCluster(){
int i;
int[] nNodesPerCluster;
int[][] nodePerCluster;
nodePerCluster = new int[nClusters][];
nNodesPerCluster = getNNodesPerCluster();
for (i = 0; i < nClusters; i++){
nodePerCluster[i] = new int[nNodesPerCluster[i]];
nNodesPerCluster[i] = 0;
}
for (i = 0; i < nNodes; i++){
nodePerCluster[cluster[i]][nNodesPerCluster[cluster[i]]] = i;
nNodesPerCluster[cluster[i]]++;
}
return nodePerCluster;
}
public void setCluster(int node, int cluster){
this.cluster[node] = cluster;
nClusters = Math.max(nClusters, cluster + 1);
}
public void initSingletonClusters(){
int i;
for (i = 0; i < nNodes; i++)
cluster[i] = i;
nClusters = nNodes;
}
public void orderClustersByNNodes(){
class ClusterNNodes implements Comparable<ClusterNNodes>{
public int cluster;
public int nNodes;
public ClusterNNodes(int cluster, int nNodes){
this.cluster = cluster;
this.nNodes = nNodes;
}
public int compareTo(ClusterNNodes clusterNNodes){
return (clusterNNodes.nNodes > nNodes) ? 1 : ((clusterNNodes.nNodes < nNodes) ? -1 : 0);
}
}
ClusterNNodes[] clusterNNodes;
int i;
int[] newCluster, nNodesPerCluster;
nNodesPerCluster = getNNodesPerCluster();
clusterNNodes = new ClusterNNodes[nClusters];
for (i = 0; i < nClusters; i++)
clusterNNodes[i] = new ClusterNNodes(i, nNodesPerCluster[i]);
Arrays.sort(clusterNNodes);
newCluster = new int[nClusters];
i = 0;
do{
newCluster[clusterNNodes[i].cluster] = i;
i++;
}
while ((i < nClusters) && (clusterNNodes[i].nNodes > 0));
nClusters = i;
for (i = 0; i < nNodes; i++)
cluster[i] = newCluster[cluster[i]];
}
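	// Projects a clustering of the reduced (aggregated) network back onto the original nodes; this is
	// how the Louvain recursion folds its result into the current partition.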
public void mergeClusters(Clustering clustering){
for (int i = 0; i < nNodes; i++)
cluster[i] = clustering.cluster[cluster[i]];
nClusters = clustering.nClusters;
}
}
| 4,775 | 25.983051 | 104 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/VOSClusteringTechnique.java | package org.dzhuang.dynamic.DynaMo;
/**
 * VOSClusteringTechnique
 *
 * @author Ludo Waltman
 * @author Nees Jan van Eck
 * @version 1.3.1, 11/23/14
 */
import java.util.HashMap;
import java.util.Random;
public class VOSClusteringTechnique{
protected Network network;
protected Clustering clustering;
protected double resolution;
public static HashMap<String, Double> alpha2;
public static double[] beta;
public VOSClusteringTechnique(Network network, double resolution){
this.network = network;
clustering = new Clustering(network.nNodes);
clustering.initSingletonClusters();
this.resolution = resolution;
}
public VOSClusteringTechnique(Network network, Clustering clustering, double resolution){
this.network = network;
this.clustering = clustering;
this.resolution = resolution;
}
public Network getNetwork(){
return network;
}
public Clustering getClustering(){
return clustering;
}
public double getResolution(){
return resolution;
}
public void setNetwork(Network network){
this.network = network;
}
public void setClustering(Clustering clustering){
this.clustering = clustering;
}
public void setResolution(double resolution){
this.resolution = resolution;
}
public double calcQualityFunction2(){
double qualityFunction;
double[] clusterWeight;
int i, j, k;
qualityFunction = 0;
for (i = 0; i < network.nNodes; i++){
j = clustering.cluster[i];
for (k = network.firstNeighborIndex[i]; k < network.firstNeighborIndex[i + 1]; k++)
if (clustering.cluster[network.neighbor[k]] == j)
qualityFunction += network.edgeWeight[k];
}
qualityFunction += network.totalEdgeWeightSelfLinks;
clusterWeight = new double[clustering.nClusters];
for (i = 0; i < network.nNodes; i++)
clusterWeight[clustering.cluster[i]] += network.nodeWeight[i];
for (i = 0; i < clustering.nClusters; i++)
qualityFunction -= clusterWeight[i] * clusterWeight[i] * resolution;
qualityFunction /= 2 * network.getTotalEdgeWeight() + network.totalEdgeWeightSelfLinks;
return qualityFunction;
}
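	// Computes the same modularity value as calcQualityFunction2(), but additionally fills the static
	// caches used by DynaMo: alpha2 accumulates the edge weight observed between every cluster pair
	// (keyed "cA_cB" with cA <= cB) and beta holds the total node weight of each cluster.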
public double calcQualityFunction(){
alpha2=new HashMap<String, Double>();
beta=new double[clustering.nClusters];
double qualityFunction;
int i, j, k;
qualityFunction = 0;
for (i = 0; i < network.nNodes; i++){
j = clustering.cluster[i];
for (k = network.firstNeighborIndex[i]; k < network.firstNeighborIndex[i + 1]; k++){
if (clustering.cluster[network.neighbor[k]] == j)
qualityFunction += network.edgeWeight[k];
if(j<=clustering.cluster[network.neighbor[k]]) {
if(alpha2.containsKey(j+"_"+clustering.cluster[network.neighbor[k]])) {
alpha2.replace(j+"_"+clustering.cluster[network.neighbor[k]],
alpha2.get(j+"_"+clustering.cluster[network.neighbor[k]])+network.edgeWeight[k]);
}
else
alpha2.put(j+"_"+clustering.cluster[network.neighbor[k]], network.edgeWeight[k]);
}
else {
if(alpha2.containsKey(clustering.cluster[network.neighbor[k]]+"_"+j)) {
alpha2.replace(clustering.cluster[network.neighbor[k]]+"_"+j,
alpha2.get(clustering.cluster[network.neighbor[k]]+"_"+j)+network.edgeWeight[k]);
}
else
alpha2.put(clustering.cluster[network.neighbor[k]]+"_"+j, network.edgeWeight[k]);
}
}
beta[j] += network.nodeWeight[i];
}
qualityFunction += network.totalEdgeWeightSelfLinks;
for (i = 0; i < clustering.nClusters; i++)
qualityFunction -= beta[i] * beta[i] * resolution;
qualityFunction /= 2 * network.totalEdgeWeight + network.totalEdgeWeightSelfLinks;
return qualityFunction;
}
public boolean runLocalMovingAlgorithm(){
return runLocalMovingAlgorithm(new Random());
}
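	// Local moving phase: nodes are visited in random order and greedily moved to the neighbouring
	// cluster that yields the largest quality gain, until a full pass leaves every node stable;
	// cluster labels are then re-numbered consecutively.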
public boolean runLocalMovingAlgorithm(Random random){
boolean update;
double maxQualityFunction, qualityFunction;
double[] clusterWeight, edgeWeightPerCluster;
int bestCluster, i, j, k, l, nNeighboringClusters, nStableNodes, nUnusedClusters;
int[] neighboringCluster, newCluster, nNodesPerCluster, nodePermutation, unusedCluster;
if (network.nNodes == 1)
return false;
update = false;
clusterWeight = new double[network.nNodes];
nNodesPerCluster = new int[network.nNodes];
for (i = 0; i < network.nNodes; i++){
clusterWeight[clustering.cluster[i]] += network.nodeWeight[i];
nNodesPerCluster[clustering.cluster[i]]++;
}
nUnusedClusters = 0;
unusedCluster = new int[network.nNodes];
for (i = 0; i < network.nNodes; i++)
if (nNodesPerCluster[i] == 0){
unusedCluster[nUnusedClusters] = i;
nUnusedClusters++;
}
nodePermutation = Arrays2.generateRandomPermutation(network.nNodes, random);
edgeWeightPerCluster = new double[network.nNodes];
neighboringCluster = new int[network.nNodes - 1];
nStableNodes = 0;
i = 0;
do{
j = nodePermutation[i];
nNeighboringClusters = 0;
for (k = network.firstNeighborIndex[j]; k < network.firstNeighborIndex[j + 1]; k++){
l = clustering.cluster[network.neighbor[k]];
if (edgeWeightPerCluster[l] == 0){
neighboringCluster[nNeighboringClusters] = l;
nNeighboringClusters++;
}
edgeWeightPerCluster[l] += network.edgeWeight[k];
}
clusterWeight[clustering.cluster[j]] -= network.nodeWeight[j];
nNodesPerCluster[clustering.cluster[j]]--;
if (nNodesPerCluster[clustering.cluster[j]] == 0){
unusedCluster[nUnusedClusters] = clustering.cluster[j];
nUnusedClusters++;
}
bestCluster = -1;
maxQualityFunction = 0;
for (k = 0; k < nNeighboringClusters; k++){
l = neighboringCluster[k];
qualityFunction = edgeWeightPerCluster[l] - network.nodeWeight[j] * clusterWeight[l] * resolution;
if ((qualityFunction > maxQualityFunction) || ((qualityFunction == maxQualityFunction) && (l < bestCluster))){
bestCluster = l;
maxQualityFunction = qualityFunction;
}
edgeWeightPerCluster[l] = 0;
}
if (maxQualityFunction == 0){
bestCluster = unusedCluster[nUnusedClusters - 1];
nUnusedClusters--;
}
clusterWeight[bestCluster] += network.nodeWeight[j];
nNodesPerCluster[bestCluster]++;
if (bestCluster == clustering.cluster[j])
nStableNodes++;
else{
clustering.cluster[j] = bestCluster;
nStableNodes = 1;
update = true;
}
i = (i < network.nNodes - 1) ? (i + 1) : 0;
}
while (nStableNodes < network.nNodes);
newCluster = new int[network.nNodes];
clustering.nClusters = 0;
for (i = 0; i < network.nNodes; i++)
if (nNodesPerCluster[i] > 0){
newCluster[i] = clustering.nClusters;
clustering.nClusters++;
}
for (i = 0; i < network.nNodes; i++)
clustering.cluster[i] = newCluster[clustering.cluster[i]];
return update;
}
public boolean runLouvainAlgorithm(){
return runLouvainAlgorithm(new Random());
}
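	// One Louvain pass: run the local moving phase, and if that produced fewer clusters than nodes,
	// recurse on the reduced (cluster-level) network and merge the result back in.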
public boolean runLouvainAlgorithm(Random random){
boolean update, update2;
VOSClusteringTechnique VOSClusteringTechnique;
if (network.nNodes == 1)
return false;
update = runLocalMovingAlgorithm(random);
if (clustering.nClusters < network.nNodes){
VOSClusteringTechnique = new VOSClusteringTechnique(network.createReducedNetwork(clustering), resolution);
update2 = VOSClusteringTechnique.runLouvainAlgorithm(random);
if (update2){
update = true;
clustering.mergeClusters(VOSClusteringTechnique.clustering);
}
}
return update;
}
public boolean runLouvainAlgorithm2(Random random){
boolean update;
VOSClusteringTechnique VOSClusteringTechnique;
if (network.nNodes == 1)
return false;
update=false;
VOSClusteringTechnique = new VOSClusteringTechnique(network.createReducedNetwork(clustering), resolution);
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
if (update){
clustering.mergeClusters(VOSClusteringTechnique.clustering);
}
return update;
}
public boolean runIteratedLouvainAlgorithm(int maxNIterations){
return runIteratedLouvainAlgorithm(maxNIterations, new Random());
}
public boolean runIteratedLouvainAlgorithm(int maxNIterations, Random random){
boolean update;
int i;
i = 0;
do{
update = runLouvainAlgorithm(random);
i++;
}
while ((i < maxNIterations) && update);
return ((i > 1) || update);
}
public int removeCluster(int cluster){
double maxQualityFunction, qualityFunction;
double[] clusterWeight, totalEdgeWeightPerCluster;
int i, j;
clusterWeight = new double[clustering.nClusters];
totalEdgeWeightPerCluster = new double[clustering.nClusters];
for (i = 0; i < network.nNodes; i++){
clusterWeight[clustering.cluster[i]] += network.nodeWeight[i];
if (clustering.cluster[i] == cluster)
for (j = network.firstNeighborIndex[i]; j < network.firstNeighborIndex[i + 1]; j++)
totalEdgeWeightPerCluster[clustering.cluster[network.neighbor[j]]] += network.edgeWeight[j];
}
i = -1;
maxQualityFunction = 0;
for (j = 0; j < clustering.nClusters; j++)
if ((j != cluster) && (clusterWeight[j] > 0)){
qualityFunction = totalEdgeWeightPerCluster[j] / clusterWeight[j];
if (qualityFunction > maxQualityFunction){
i = j;
maxQualityFunction = qualityFunction;
}
}
if (i >= 0){
for (j = 0; j < network.nNodes; j++)
if (clustering.cluster[j] == cluster)
clustering.cluster[j] = i;
if (cluster == clustering.nClusters - 1)
clustering.nClusters = Arrays2.calcMaximum(clustering.cluster) + 1;
}
return i;
}
public void removeSmallClusters(int minNNodesPerCluster){
int i, j, k;
int[] nNodesPerCluster;
VOSClusteringTechnique VOSClusteringTechnique;
VOSClusteringTechnique = new VOSClusteringTechnique(network.createReducedNetwork(clustering), resolution);
nNodesPerCluster = clustering.getNNodesPerCluster();
do{
i = -1;
j = minNNodesPerCluster;
for (k = 0; k < VOSClusteringTechnique.clustering.nClusters; k++)
if ((nNodesPerCluster[k] > 0) && (nNodesPerCluster[k] < j)){
i = k;
j = nNodesPerCluster[k];
}
if (i >= 0){
j = VOSClusteringTechnique.removeCluster(i);
if (j >= 0)
nNodesPerCluster[j] += nNodesPerCluster[i];
nNodesPerCluster[i] = 0;
}
}
while (i >= 0);
clustering.mergeClusters(VOSClusteringTechnique.clustering);
}
} | 12,304 | 33.759887 | 126 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/DynaMo/ModularityOptimizer_Louvain.java | package org.dzhuang.dynamic.DynaMo;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Random;
public class ModularityOptimizer_Louvain{
public static double resolution_default=1.0;
public static int nRandomStarts_default=10;
public static int nIterations_default=10000;
public static long randomSeed_default=0;
public static void main(String[] args) throws IOException, ClassNotFoundException{
// runLouvain("Cit-HepPh", 31);
// runLouvain("Cit-HepTh", 25);
// runLouvain("dblp_coauthorship", 31);
// runLouvain("facebook", 28);
// runLouvain("flickr", 24);
// runLouvain("youtube", 33);
// runLouvain("networks_us-101", 3991);
// runLouvain("networks_i-80", 3991);
// runLouvain("networks_lankershim", 3991);
// runLouvain("networks_peachtree", 3991);
// runLouvain("networks_us-101", 1000);
// runLouvain("networks_i-80", 1000);
// runLouvain("networks_lankershim", 1000);
// runLouvain("networks_peachtree", 1000);
runLouvain("youtube", 33);
}
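	// Static baseline: re-runs plain Louvain from scratch on every network snapshot and records the
	// best modularity over nRandomStarts_default restarts together with the wall-clock time.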
public static void runLouvain(String dataSet, int nbatch) throws IOException, ClassNotFoundException{
String DyNet="data/"+dataSet+"/ntwk2/";
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLouvain");
for(int ibatch=1;ibatch<=nbatch;ibatch++){
Network network = Network.load(DyNet+ibatch);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
double resolution2 = resolution_default / (2 * network.totalEdgeWeight + network.totalEdgeWeightSelfLinks);
/***************************************************************************/
long t1=System.currentTimeMillis();
for (int i = 0; i < nRandomStarts_default; i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(network, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction2();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
long t2=System.currentTimeMillis();
/***************************************************************************/
writeOutputFile("data/"+dataSet+"/runLouvain_"+dataSet+"_com_"+(ibatch+1), clustering);
System.out.println(dataSet+"\t"+"runLouvain"+"\t"+ibatch+"\t"+maxModularity+"\t"+(t2-t1));
if(ibatch>1)
pw.println(ibatch+"\t"+maxModularity+"\t"+(t2-t1));
}
pw.close();
}
private static void writeOutputFile(String fileName, Clustering clustering) throws IOException{
BufferedWriter bufferedWriter;
int i, nNodes;
nNodes = clustering.getNNodes();
clustering.orderClustersByNNodes();
bufferedWriter = new BufferedWriter(new FileWriter(fileName));
for (i = 0; i < nNodes; i++){
bufferedWriter.write(Integer.toString(clustering.getCluster(i)));
bufferedWriter.newLine();
}
bufferedWriter.close();
}
} | 3,435 | 38.953488 | 116 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/util/Parameter.java | package org.dzhuang.dynamic.util;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
public class Parameter {
public static int HASH_BASE = 10000; //The hash base
public static String ROOT_PATH = "E:/experiment/paper-00";
public static final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
public static final DecimalFormat df = new DecimalFormat("0.000000");
public static final DecimalFormat shortDf = new DecimalFormat("0.00");
}
| 484 | 27.529412 | 88 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/util/DateUtil.java | package org.dzhuang.dynamic.util;
import java.util.Calendar;
public class DateUtil {
public static long nextSecond(long timeStamp){
return timeStamp + 1;
}
public static long nextMinute(long timeStamp){
return timeStamp + 60;
}
public static long nextHour(long timeStamp){
long end = timeStamp + 60 * 60;
return end;
}
public static long nextDay(long timeStamp){
long end = timeStamp + 24 * 60 * 60;
return end;
}
public static long nextWeek(long timeStamp){
long end = timeStamp + 7 * 24 * 60 * 60;
return end;
}
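	// Timestamps are UNIX seconds; the month length is taken from the month containing the given
	// timestamp, using the leap-year table when appropriate.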
public static long nextMonth(long timeStamp){
long end = timeStamp;
int monthArr[] = {31,28,31,30,31,30,31,31,30,31,30,31};
int leapMonthArr[] = {31,29,31,30,31,30,31,31,30,31,30,31};
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(timeStamp * 1000);
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
if((year%4 == 0 && year%100 != 0) || year%400 == 0)
end += leapMonthArr[month] * 24 * 60 * 60;
else
end += monthArr[month] * 24 * 60 * 60;
return end;
}
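	// Note (editor's illustration, not part of the original documentation): nextMonth advances the
	// timestamp by the length of the calendar month it falls in. For example, a timestamp in
	// February 2012 (a leap year) is advanced by 29*24*60*60 seconds, while one in February 2011
	// is advanced by 28 days.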
public static long nextKMonth(long timeStamp, int k){
long end = timeStamp;
for(int i = 0; i < k; i++){
end = nextMonth(end);
}
return end;
}
}
| 1,229 | 21.777778 | 61 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/util/Utility.java | package org.dzhuang.dynamic.util;
import org.dzhuang.dynamic.graph.*;
import java.text.DecimalFormat;
import java.util.*;
import java.io.*;
public class Utility {
public static void main(String args[]) throws Exception{
ArrayList<Integer> random = randomOrderList(10000);
ArrayList<Integer> sortedList = new ArrayList();
for(int i = 0; i < random.size(); i++){
insertIntoList(sortedList, random.get(i));
}
System.out.println(sortedList);
}
public static float sumFloat(ArrayList<Float> dataList){
float sum = 0;
for(int i = 0; i < dataList.size(); i++){
sum += dataList.get(i);
}
return sum;
}
/**
* generate a list of 0,...,n-1 with random order
* @param n
* @return
*/
public static ArrayList<Integer> randomOrderList(int n){
ArrayList<Integer> randomOrder = new ArrayList();
randomOrder.ensureCapacity(n);
for(int i = 0; i < n; i++){
randomOrder.add(new Integer(i));
}
Random rand = new Random();
for(int i = 0; i <n-1; i++){
int randPos = rand.nextInt(n);
int tmp = randomOrder.get(i);
randomOrder.set(i, randomOrder.get(randPos).intValue());
randomOrder.set(randPos, tmp);
}
return randomOrder;
}
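	// Illustrative example (output is random, the values shown are hypothetical):
	//   randomOrderList(5) could return [3, 0, 4, 1, 2] -- a permutation of 0..4 produced by
	//   swapping each position i with a uniformly chosen position of the list.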
/**
* random the order of a given list
* @param list
* @return
*/
public static ArrayList randomListOrder(ArrayList list){
ArrayList list1 = new ArrayList();
ArrayList<Integer> orderList = randomOrderList(list.size());
for(int i = 0; i < list.size(); i++){
list1.add(list.get(orderList.get(i)));
}
return list1;
}
/**
* Output the data distribution of an array
* @param data
* @throws Exception
*/
public static void printDataDistribution(int data[]) throws Exception{
TreeMap<Integer, Integer> dataMap = new TreeMap();
for(int i = 0; i < data.length; i++){
if(!dataMap.containsKey(data[i]))
dataMap.put(data[i], 1);
else
dataMap.put(data[i], dataMap.get(data[i])+1);
}
int keys[] = new int[dataMap.size()];
int values[] = new int[dataMap.size()];
double probs[] = new double[dataMap.size()];
Iterator<Integer> it = dataMap.keySet().iterator();
int k = 0;
while(it.hasNext()){
int key = it.next();
keys[k] = key;
values[k] = dataMap.get(key);
probs[k] = (double)dataMap.get(key) / data.length;
k++;
}
printArray(keys);
printArray(values);
printArray(probs);
}
/**
* Output the data distribution of a list
* @param data
* @throws Exception
*/
public static void printDataDistribution(ArrayList<Integer> data) throws Exception{
TreeMap<Integer, Integer> dataMap = new TreeMap();
for(int i = 0; i < data.size(); i++){
if(!dataMap.containsKey(data.get(i)))
dataMap.put(data.get(i), 1);
else
dataMap.put(data.get(i), dataMap.get(data.get(i))+1);
}
int keys[] = new int[dataMap.size()];
int values[] = new int[dataMap.size()];
double probs[] = new double[dataMap.size()];
Iterator<Integer> it = dataMap.keySet().iterator();
int k = 0;
while(it.hasNext()){
int key = it.next();
keys[k] = key;
values[k] = dataMap.get(key);
probs[k] = (double)dataMap.get(key) / data.size();
k++;
}
printArray(keys);
printArray(values);
printArray(probs);
}
public static void printArray(int data[]){
System.out.print("[");
for(int i = 0; i < data.length-1; i++){
System.out.print(data[i] + ",");
}
System.out.println(data[data.length-1] + "]");
}
public static void printArray(double data[]){
System.out.print("[");
for(int i = 0; i < data.length-1; i++){
System.out.print(Parameter.df.format(data[i]) + ",");
}
System.out.println(Parameter.df.format(data[data.length-1]) + "]");
}
public static double[] readArr(String inputPath) throws Exception{
ArrayList<Double> dataList = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(inputPath));
String str = br.readLine();
while(str != null){
dataList.add(new Double(str));
str = br.readLine();
}
		br.close();
		double data[] = new double[dataList.size()];
for(int i = 0; i < dataList.size(); i++){
data[i] = dataList.get(i);
}
return data;
}
public static void writerArray(double data[], String outputPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
for(int i = 0; i < data.length; i++)
bw.write(Parameter.df.format(data[i]) + "\r\n");
bw.close();
}
/**
* Find the common integer shared by the two lists
* @param list1
* @param list2
* @return
*/
public static int getCommId(ArrayList<Integer> list1, ArrayList<Integer> list2){
int commId = -1;
for(int i = 0; i < list1.size(); i++){
for(int j = 0; j < list2.size(); j++){
if(list1.get(i).intValue() == list2.get(j).intValue()){
commId = list1.get(i);
return commId;
}
}
}
return commId;
}
/**
* Randomly select n elements from a given list
* @param list
* @param n
* @return
*/
public static ArrayList<Integer> select(ArrayList<Integer> list, int n){
if(n >= list.size())
return list;
ArrayList<Integer> selectedList = new ArrayList();
HashSet<Integer> set = new HashSet();
while(set.size() < n){
int rand = (int)(Math.random()*list.size());
if(!set.contains(rand)){
set.add(rand);
selectedList.add(list.get(rand));
}
}
return selectedList;
}
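	// Illustrative example (hypothetical values, actual output is random):
	//   select([10, 20, 30, 40], 2) might return [30, 10]; when n >= list.size() the original
	//   list is returned unchanged.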
/**
* remove an element from a list by its value
* @param list
* @param value
* @return
*/
public static int removeByValue(ArrayList<Integer> list, int value){
int index = -1;
for(int i = 0; i < list.size(); i++){
if(list.get(i) == value){
list.remove(i);
index = i;
break;
}
}
return index;
}
public static HashMap<Integer, String> reverseDict(HashMap<String, Integer> dict){
HashMap<Integer, String> revDict = new HashMap();
Iterator<String> it = dict.keySet().iterator();
while(it.hasNext()){
String str = it.next();
int id = dict.get(str);
revDict.put(id, str);
}
return revDict;
}
	//return the number of common elements shared by the two sets
public static int getCommNum(HashSet set1, HashSet set2){
int commNum = 0;
Iterator<Integer> it = set1.iterator();
while(it.hasNext()){
int value = it.next();
if(set2.contains(value))
commNum++;
}
return commNum;
}
	//return the number of common elements in two ascending-sorted lists (merge-style scan)
public static int getCommNum(ArrayList<Integer> list1, ArrayList<Integer> list2){
int commNum = 0;
int id1 = 0, id2 = 0;
while(id1 < list1.size() && id2 < list2.size()){
int elem1 = list1.get(id1);
int elem2 = list2.get(id2);
if(elem1 == elem2){
commNum++;
id1++;
id2++;
}
else if(elem1 < elem2)
id1++;
else
id2++;
}
return commNum;
}
/**
* Add the element pair into list and keep it in a special order
* The Binary Search algorithm is used to insert the element into the list
* @param list
* @param pair
* @param sordIndex - order the list by pair.key (sortIndex=0) or pair.value (sortIndex=1)
* @return the index of the inserted element
*/
public static int insertIntoList(ArrayList<Pair> list, Pair pair, int sortIndex){
int index = 0;
if(list.isEmpty())
list.add(pair);
else{
int low = 0, high = list.size()-1;
int mid = (low + high) / 2;
while(low < high){
Pair pm = list.get(mid);
if(sortIndex == 0){
if(pm.key <= pair.key){
low = mid + 1;
}else{
high = mid - 1;
}
}else{
if(pm.value <= pair.value){
low = mid + 1;
}else{
high = mid - 1;
}
}
if(low > high)
break;
mid = (high + low) / 2;
}
if(sortIndex == 0){
if(mid == high && list.get(mid).key < pair.key)
mid = high+1;
}else{
if(mid == high && list.get(mid).value < pair.value)
mid = high+1;
}
if(mid >= list.size()){
list.add(pair);
index = list.size() - 1;
}
else{
list.add(mid, pair);
index = mid;
}
}
return index;
}
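	// Illustrative example (hypothetical values): with sortIndex = 1 the list stays ordered by
	// Pair.value, so inserting new Pair(7, 0.3) into [(1,0.2), (3,0.5)] yields
	// [(1,0.2), (7,0.3), (3,0.5)] and the method returns index 1.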
public static int insertIntoList(ArrayList<Integer> list, int elem){
int index = 0;
if(list.isEmpty())
list.add(elem);
else{
int low = 0, high = list.size()-1;
int mid = (low + high) / 2;
while(low < high){
int m = list.get(mid);
if(m <= elem){
low = mid+1;
}
else{
high = mid-1;
}
if(low > high)
break;
mid = (high + low) / 2;
}
if(mid == high && list.get(mid) < elem)
mid = high+1;
			if(mid >= list.size()){
				list.add(elem);
				index = list.size() - 1;
}
else{
list.add(mid, elem);
index = mid;
}
}
return index;
}
public static void listSwap (ArrayList list, int i, int j){
Object o = list.get(i);
list.set(i, list.get(j));
list.set(j, o);
}
/**
* Compute the average and standard deviation of an array of lists
* @param data
* @return
*/
public static ArrayList<Float> [] avgAndSd(ArrayList<Float>[] data){
int num = data.length;
ArrayList<Float>[] result = new ArrayList[2];
result[0] = new ArrayList();
result[1] = new ArrayList();
ArrayList<Float> first = data[0];
//initialize
for(int i = 0; i < first.size(); i++){
result[0].add((float)0);
result[1].add((float)0);
}
//Compute the average
for(int i = 0; i < data.length; i++){
ArrayList<Float> list = data[i];
for(int j = 0; j < list.size(); j++){
result[0].set(j, result[0].get(j) + list.get(j));
}
}
for(int i = 0; i < result[0].size(); i++){
result[0].set(i, result[0].get(i) / num);
}
//Compute the standard deviation
for(int j = 0; j < result[0].size(); j++){
float avg = result[0].get(j);
			for(int i = 0; i < data.length; i++){
float value = data[i].get(j);
float dev = (float)Math.pow(avg - value, 2);
result[1].set(j, result[1].get(j) + dev);
}
result[1].set(j, (float)Math.sqrt(result[1].get(j) / (num-1)));
}
return result;
}
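	// Worked example (hypothetical data): for data = { [1,2], [3,4] } the method returns
	// result[0] = [2.0, 3.0] (per-position averages) and result[1] = [~1.41, ~1.41]
	// (sample standard deviations, i.e. dividing by num-1).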
public static void keepLastIntegers(ArrayList<Integer> list, int n){
		while(list.size() > n){
list.remove(0);
}
}
public static void keepLastFloats(ArrayList<Float> list, int n){
while(list.size() > n){
list.remove(0);
}
}
}
| 11,221 | 26.572482 | 94 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/util/FileUtil.java | package org.dzhuang.dynamic.util;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.zip.GZIPInputStream;
import org.dzhuang.dynamic.graph.Data;
import org.dzhuang.dynamic.graph.LabelEdge;
import org.dzhuang.dynamic.graph.Link;
import org.dzhuang.dynamic.graph.Pair;
import org.dzhuang.dynamic.graph.Point;
public class FileUtil {
public static void main(String args[]) throws Exception{
}
public static final int BY_SECOND = 1;
public static final int BY_MINUTE = 2;
public static final int BY_HOUR = 3;
public static final int BY_DAY = 4;
public static final int BY_WEEK = 5;
public static final int BY_MONTH = 6;
public static final int BY_TWO_MONTH = 7;
public static final int BY_YEAR = 8;
public static boolean deleteDir(File dir) {
if (dir.isDirectory()) {
String[] children = dir.list();
for (int i = 0; i < children.length; i++) {
boolean success = deleteDir(new File(dir, children[i]));
if (!success)
return false;
}
}
System.out.println("The directory "+dir.toString()+" is deleted.");
return dir.delete();
}
/**
	 * insert the extend string between the base file name and its extension
* @param name
* @param extend
* @return
*/
public static String extendFileName(String name, String extend){
int index = name.lastIndexOf('.');
String prefix = name.substring(0, index);
String suffix = name.substring(index+1, name.length());
String newName = prefix + extend + "." + suffix;
return newName;
}
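	// Illustrative example (hypothetical name): extendFileName("graph.txt", "_01") returns
	// "graph_01.txt" -- the extend string is spliced in just before the last '.'.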
/**
	 * replace the file-name component of filePath with newName, keeping the directory part
* @param filePath
* @param newName
* @return
*/
public static String replaceFileName(String filePath, String newName){
File file = new File(filePath);
return filePath.replace(file.getName(), newName);
}
/**
	 * divide the dataset into an initial part and several incremental parts
	 * @param totalNum - total number of data records (e.g. email communication events)
	 * @param startRatio - the ratio of records assigned to the initial part
	 * @param incPoints - the total number of incremental batches
	 * @return an array of length incPoints+1 holding the number of records in each part
*/
public static int[] dataPartition(int totalNum, double startRatio, int incPoints){
int dataArr[] = new int[incPoints+1];
dataArr[0] = (int)(totalNum * startRatio);
int batchSize = (totalNum - dataArr[0]) / incPoints;
for(int i = 1; i < dataArr.length-1; i++){
dataArr[i] = batchSize;
}
dataArr[dataArr.length-1] = totalNum - dataArr[0] - (incPoints-1) * batchSize;
return dataArr;
}
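	// Worked example (hypothetical numbers): dataPartition(100, 0.5, 5) returns
	// [50, 10, 10, 10, 10, 10] -- 50 records for the initial snapshot and 5 equal-sized
	// incremental batches (the last batch absorbs any rounding remainder).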
/**
* divide the file into sereral sub files, each file contains part of the data
* @param intputPath
* @param outFileName
* @param dataArr - the number of lines in each of the sub files
*/
public static void filePartition(String inputPath, String outFileName, double startRatio, int incPoints) throws Exception{
int totalNum = getFileLineNum(inputPath);
int dataArr[] = dataPartition(totalNum, startRatio, incPoints);
DecimalFormat df = new DecimalFormat("00");
int sum = 0;
for(int i = 0 ; i < dataArr.length; i++){
System.out.println("Generating inc file: " + i);
String fileName = extendFileName(outFileName, "_" + df.format(i));
generateSubFile(inputPath, fileName, sum, sum+dataArr[i]);
sum += dataArr[i];
}
}
public static void fileExtract(String inputPath, String outputPath, Date startTime, Date endTime) throws Exception{
ArrayList<Data> dataList = readData(inputPath);
int cursor = 0;
long start = startTime.getTime() / 1000;
long end = endTime.getTime() / 1000;
while(cursor < dataList.size()){
Data data = dataList.get(cursor);
if(data.timestamp >= start)
break;
cursor++;
}
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
while(cursor < dataList.size()){
Data data = dataList.get(cursor);
if(data.timestamp >= end)
break;
bw.write(data + "\r\n");
cursor++;
}
bw.close();
}
public static void filePartition(String inputPath, String outFileName, Date startTime, Date divideTime, int periodType) throws Exception{
ArrayList<Data> dataList = readData(inputPath);
int cusor = 0;
long start = startTime.getTime() / 1000;
long divide = divideTime.getTime() / 1000;
while(cusor < dataList.size()){
Data data = dataList.get(cusor);
if(data.timestamp >= start)
break;
cusor++;
}
int fileNum = 0;
if(periodType == BY_MONTH){
int monthArr[] = {31,28,31,30,31,30,31,31,30,31,30,31};
int leapMonthArr[] = {31,29,31,30,31,30,31,31,30,31,30,31};
String fileName = extendFileName(outFileName, "_" + fileNum);
String filePath = replaceFileName(inputPath, fileName);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
while(cusor < dataList.size()){
Data data = dataList.get(cusor);
if(data.timestamp < divide){
bw.write(data + "\r\n");
}
else{
bw.close();
fileNum++;
fileName = extendFileName(outFileName, "_" + fileNum);
filePath = replaceFileName(inputPath, fileName);
bw = new BufferedWriter(new FileWriter(filePath));
bw.write(data + "\r\n");
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(divide * 1000);
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
if((year%4 == 0 && year%100 != 0) || year%400 == 0)
divide += leapMonthArr[month] * 24 * 60 * 60;
else
divide += monthArr[month] * 24 * 60 * 60;
}
cusor++;
}
bw.close();
}
}
public static void filePartition(String inputPath, String outFileName, Date divideTime, int periodType) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(inputPath));
long divide = divideTime.getTime() / 1000;
int fileNum = 0;
if(periodType == BY_WEEK){
String fileName = extendFileName(outFileName, "_" + fileNum);
String filePath = replaceFileName(inputPath, fileName);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
token.nextToken();
token.nextToken();
long timeStamp = new Long(token.nextToken());
if(timeStamp < divide){
bw.write(str + "\r\n");
}
else{
bw.close();
fileNum++;
fileName = extendFileName(outFileName, "_" + fileNum);
filePath = replaceFileName(inputPath, fileName);
bw = new BufferedWriter(new FileWriter(filePath));
bw.write(str + "\r\n");
divide += 7 * 24 * 60 * 60;
}
str = br.readLine();
}
bw.close();
}
else if(periodType == BY_MONTH){
int monthArr[] = {31,28,31,30,31,30,31,31,30,31,30,31};
int leapMonthArr[] = {31,29,31,30,31,30,31,31,30,31,30,31};
String fileName = extendFileName(outFileName, "_" + fileNum);
String filePath = replaceFileName(inputPath, fileName);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
token.nextToken();
token.nextToken();
long timeStamp = new Long(token.nextToken());
if(timeStamp < divide){
bw.write(str + "\r\n");
}
else{
bw.close();
fileNum++;
fileName = extendFileName(outFileName, "_" + fileNum);
filePath = replaceFileName(inputPath, fileName);
bw = new BufferedWriter(new FileWriter(filePath));
bw.write(str + "\r\n");
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(divide * 1000);
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
if((year%4 == 0 && year%100 != 0) || year%400 == 0)
divide += leapMonthArr[month] * 24 * 60 * 60;
else
divide += monthArr[month] * 24 * 60 * 60;
}
str = br.readLine();
}
bw.close();
}
else if(periodType == BY_TWO_MONTH){
int monthArr[] = {31,28,31,30,31,30,31,31,30,31,30,31};
int leapMonthArr[] = {31,29,31,30,31,30,31,31,30,31,30,31};
String fileName = extendFileName(outFileName, "_" + fileNum);
String filePath = replaceFileName(inputPath, fileName);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
token.nextToken();
token.nextToken();
long timeStamp = new Long(token.nextToken());
if(timeStamp < divide){
bw.write(str + "\r\n");
}
else{
bw.close();
fileNum++;
fileName = extendFileName(outFileName, "_" + fileNum);
filePath = replaceFileName(inputPath, fileName);
bw = new BufferedWriter(new FileWriter(filePath));
bw.write(str + "\r\n");
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(divide * 1000);
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
if((year%4 == 0 && year%100 != 0) || year%400 == 0)
divide += leapMonthArr[month] * 24 * 60 * 60;
else
divide += monthArr[month] * 24 * 60 * 60;
cal = Calendar.getInstance();
cal.setTimeInMillis(divide * 1000);
year = cal.get(Calendar.YEAR);
month = cal.get(Calendar.MONTH);
if((year%4 == 0 && year%100 != 0) || year%400 == 0)
divide += leapMonthArr[month] * 24 * 60 * 60;
else
divide += monthArr[month] * 24 * 60 * 60;
}
str = br.readLine();
}
bw.close();
}
br.close();
}
/**
* Generate the decrease files, which consist of the early data
* @param inputPath
* @param outFileName
* @param points
* @param periodType
* @throws Exception
*/
public static void generateDecreaseFiles(String inputPath, String outFileName, int points, int periodType) throws Exception{
ArrayList<Data> dataList = readData(inputPath);
int index = 0;
for(int i = 0; i < points; i++){
int fileNo = i+1;
long fromTime = dataList.get(index).timestamp;
long toTime = fromTime;
String fileName = extendFileName(outFileName, "_" + fileNo);
String filePath = replaceFileName(inputPath, fileName);
System.out.println("Generating file: " + filePath);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
if(periodType == BY_MONTH){
toTime = DateUtil.nextMonth(fromTime);
}
while(index < dataList.size() && dataList.get(index).timestamp < toTime){
bw.write(dataList.get(index) + "\r\n");
index++;
}
bw.close();
}
}
public static void generateDecreaseFiles(String inputPath, String outFileName, Date startTime, int points, int periodType) throws Exception{
ArrayList<Data> dataList = readData(inputPath);
int index = 0;
long time = startTime.getTime() / 1000;
while(index < dataList.size()){
Data data = dataList.get(index);
if(data.timestamp >= time)
break;
index++;
}
for(int i = 0; i < points; i++){
int fileNo = i+1;
String fileName = extendFileName(outFileName, "_" + fileNo);
String filePath = replaceFileName(inputPath, fileName);
System.out.println("Generating file: " + filePath);
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
if(periodType == BY_MONTH){
time = DateUtil.nextMonth(time);
}
while(index < dataList.size() && dataList.get(index).timestamp < time){
bw.write(dataList.get(index) + "\r\n");
index++;
}
bw.close();
}
}
/**
* generate a sub file which contains parts of the lines of data from the original data file
* @param inputPath
* @param outFileName
* @param startLine
* @param endLine
* @throws Exception
*/
public static void generateSubFile(String inputPath, String outFileName, int startLine, int endLine) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(inputPath));
BufferedWriter bw = new BufferedWriter(new FileWriter(replaceFileName(inputPath, outFileName)));
int lineNum = 0;
String str = br.readLine();
while(str != null){
if(lineNum >= startLine && lineNum < endLine){
bw.write(str + "\r\n");
}
lineNum++;
str = br.readLine();
}
br.close();
bw.close();
}
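	// Illustrative example (hypothetical paths): generateSubFile("data/events.txt", "events_00.txt", 0, 500)
	// copies lines 0..499 of events.txt into a new file data/events_00.txt in the same directory.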
public static int getFileLineNum(String filePath) throws Exception{
int lineNum = 0;
BufferedReader br = new BufferedReader(new FileReader(filePath));
String str = br.readLine();
while(str != null){
lineNum++;
str = br.readLine();
}
br.close();
return lineNum;
}
public static HashSet<String> readSet(String inputPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(inputPath));
HashSet<String> set = new HashSet();
String str = br.readLine();
while(str != null){
set.add(str);
str = br.readLine();
}
		br.close();
		return set;
}
/**
* write set value to file
* @param outputPath
* @param set
* @throws Exception
*/
public static void writeSet(String outputPath, HashSet<String> set) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
Iterator<String> it = set.iterator();
while(it.hasNext()){
String value = it.next();
bw.write(value + "\r\n");
}
bw.close();
}
/**
	 * return the number of distinct nodes appearing in the data set
	 * the format of the graph file is: src(\t)dest(\t)other
	 * @param graphPath
	 * @return the number of distinct nodes
*/
public static int getNodeNum(String graphPath) throws Exception{
HashSet<Integer> nodeSet = new HashSet<Integer>();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = new Integer(token.nextToken());
int dest = new Integer(token.nextToken());
if(!nodeSet.contains(src))
nodeSet.add(src);
if(!nodeSet.contains(dest))
nodeSet.add(dest);
str = br.readLine();
}
br.close();
return nodeSet.size();
}
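	// Illustrative example (hypothetical file content): for a graph file containing the lines
	// "1\t2\t1" and "2\t3\t1", getNodeNum returns 3 (the distinct integer node ids 1, 2 and 3).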
public static HashMap<String, Integer> getDict(String graphPath) throws Exception{
HashMap<String, Integer> nodeDict = new HashMap<String, Integer>();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
int nodeId = 0;
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String src = token.nextToken();
String dest = token.nextToken();
if(!nodeDict.containsKey(src))
nodeDict.put(src, nodeId++);
if(!nodeDict.containsKey(dest))
nodeDict.put(dest, nodeId++);
str = br.readLine();
}
br.close();
return nodeDict;
}
public static void unGzip(String inputPath, String outputPath) throws Exception{
GZIPInputStream gzin = new GZIPInputStream(new FileInputStream(inputPath));
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(outputPath));
int b;
byte [] bytes = new byte[1024];
while((b = gzin.read(bytes)) > 0){
bos.write(bytes, 0, b);
}
gzin.close();
bos.close();
}
public static void generateGraphFile(String incPath, String graphPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(incPath));
BufferedWriter bw = new BufferedWriter(new FileWriter(graphPath));
HashSet<Point> edgeSet = new HashSet();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int srcId = new Integer(token.nextToken());
int destId = new Integer(token.nextToken());
if(!edgeSet.contains(new Point(srcId, destId)) && !edgeSet.contains(new Point(destId, srcId))){
bw.write(srcId + "\t" + destId + "\t1\r\n");
}
edgeSet.add(new Point(srcId, destId));
str = br.readLine();
}
br.close();
bw.close();
}
public static void appendContent(String filePath, String content) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(content);
bw.close();
}
/**
* read all the content of the file into a String
* @param filePath
* @return
*/
public static String readOneString(String filePath) throws Exception{
String result = "";
BufferedReader br = new BufferedReader(new FileReader(filePath));
String str = br.readLine();
while(str != null){
result += str + "\r\n";
str = br.readLine();
}
return result;
}
public static void writeString(String filePath, String content) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
bw.write(content);
bw.close();
}
public static void deleteFile(String filePath){
File file = new File(filePath);
if(file.exists() && !file.isDirectory())
file.delete();
}
public static void aggregateFiles(ArrayList<String> inputList, String outputPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
for(int i = 0; i < inputList.size(); i++){
BufferedReader br = new BufferedReader(new FileReader(inputList.get(i)));
String str = br.readLine();
while(str != null){
bw.write(str + "\r\n");
str = br.readLine();
}
br.close();
}
bw.close();
}
public static void append(String basePath, String incPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(basePath, true));
BufferedReader br = new BufferedReader(new FileReader(incPath));
String str = br.readLine();
while(str != null){
bw.write(str + "\r\n");
str = br.readLine();
}
br.close();
bw.close();
}
public static ArrayList<Data> readData(String filePath) throws Exception{
ArrayList<Data> dataList = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(filePath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
long timestamp = new Long(token.nextToken());
//TODO
if(!from.equals(to)) {
Data data = new Data(from, to, timestamp);
dataList.add(data);
}
str = br.readLine();
}
br.close();
return dataList;
}
public static TreeSet<LabelEdge> readEdgeSet(String filePath) throws Exception{
ArrayList<Data> dataList = readData(filePath);
TreeSet<LabelEdge> edgeSet = new TreeSet();
for(int i = 0; i < dataList.size(); i++){
Data data = dataList.get(i);
edgeSet.add(new LabelEdge(data.from, data.to));
}
return edgeSet;
}
public static TreeSet<LabelEdge> getChangeSet(String incPath, String decPath) throws Exception{
TreeSet<LabelEdge> incSet = readEdgeSet(incPath);
TreeSet<LabelEdge> decSet = readEdgeSet(decPath);
TreeSet<LabelEdge> changeSet = new TreeSet();
Iterator<LabelEdge> it = decSet.iterator();
while(it.hasNext()){
LabelEdge edge = it.next();
if(!incSet.contains(edge)){
edge.weight *= -1; //mark this edge as removed
changeSet.add(edge);
}
}
changeSet.addAll(incSet);
return changeSet;
}
public static void writeGraph(TreeSet<LabelEdge> edgeSet, String graphPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(graphPath));
Iterator<LabelEdge> it = edgeSet.iterator();
while(it.hasNext()){
LabelEdge edge = it.next();
bw.write(edge + "\r\n");
}
bw.close();
}
public ArrayList<Data> readAllData(String incPath, int from) throws Exception{
ArrayList<Data> dataList = new ArrayList();
while(true){
String incFilePath = FileUtil.extendFileName(incPath, "_" + from);
File incFile = new File(incFilePath);
if(!incFile.exists())
break;
			dataList.addAll(readData(incFilePath));
			from++;	// advance to the next increment file
}
return dataList;
}
/**
* Generate the graph file from the interaction data
* @param dataPath
* @param graphPath
* @throws Exception
*/
public static void generateGraph(String dataPath, String graphPath) throws Exception{
TreeSet<LabelEdge> edgeSet = new TreeSet();
BufferedReader br = new BufferedReader(new FileReader(dataPath));
BufferedWriter bw = new BufferedWriter(new FileWriter(graphPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String srcId = token.nextToken();
String destId = token.nextToken();
if(!edgeSet.contains(new LabelEdge(srcId, destId))){
edgeSet.add(new LabelEdge(srcId, destId));
bw.write(srcId + "\t" + destId + "\t1\r\n");
}
str =br.readLine();
}
br.close();
bw.close();
}
/**
* generate the aggregate graph from the interaction data
* @param incPath
* @param graphPath
* @throws Exception
*/
public static void generateAggregateGraphs(String incPath, String graphPath) throws Exception{
TreeSet<LabelEdge> edgeSet = new TreeSet();
int i = 0;
File incFile = new File(FileUtil.extendFileName(incPath, "_" + i));
ArrayList<String> edgeList = new ArrayList();
while(incFile.exists()){
System.out.println("Generating graph: " + i);
//read data
BufferedReader br = new BufferedReader(new FileReader(incFile));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String srcId = token.nextToken();
String destId = token.nextToken();
if(!edgeSet.contains(new LabelEdge(srcId, destId))){
edgeSet.add(new LabelEdge(srcId, destId));
edgeList.add(srcId + "\t" + destId + "\t" + "1");
}
str =br.readLine();
}
br.close();
//write graph file
BufferedWriter bw = new BufferedWriter(new FileWriter(FileUtil.extendFileName(graphPath, "_" + i)));
for(int j = 0; j < edgeList.size(); j++){
bw.write(edgeList.get(j) + "\r\n");
}
bw.close();
i++;
incFile = new File(FileUtil.extendFileName(incPath, "_" + i));
}
}
public static void generateTemporalGraphs(String incPath, String decPath, String graphPath) throws Exception{
System.out.println("Generating graph 0...");
TreeSet<LabelEdge> edgeSet = FileUtil.readEdgeSet(FileUtil.extendFileName(incPath, "_0"));
FileUtil.writeGraph(edgeSet, FileUtil.extendFileName(graphPath, "_0"));
int i = 1;
File incFile = new File(FileUtil.extendFileName(incPath, "_" + i));
while(incFile.exists()){
System.out.println("Generating graph " + i + "...");
TreeSet<LabelEdge> decSet = FileUtil.readEdgeSet(FileUtil.extendFileName(decPath, "_" + i));
TreeSet<LabelEdge> incSet = FileUtil.readEdgeSet(FileUtil.extendFileName(incPath, "_" + i));
edgeSet.removeAll(decSet);
edgeSet.addAll(incSet);
FileUtil.writeGraph(edgeSet, FileUtil.extendFileName(graphPath, "_" + i));
i++;
incFile = new File(FileUtil.extendFileName(incPath, "_" + i));
}
}
public static void uniqIteraData(String inputPath, String outputPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(inputPath));
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
TreeSet<String> dataSet = new TreeSet();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
String time = token.nextToken();
String min = from;
String max = to;
if(from.compareTo(to) > 0){
min = to;
max = from;
}
if(!dataSet.contains(min + "\t" + max)){
bw.write(from + "\t" + to + "\t" + time + "\r\n");
dataSet.add(min + "\t" + max);
}
str = br.readLine();
}
br.close();
bw.close();
}
public static HashMap checkCommunityStructure(String graphPath, String comPath) throws Exception{
//initialize
HashMap<String, Integer> nodeDict = getDict(graphPath);
int nodes = nodeDict.size();
int links = 0;
double m2 = 0;
ArrayList<ArrayList<Pair>> topology = new ArrayList(nodes);
TreeSet<Link> linkSet = new TreeSet();
for(int i = 0; i < nodes; i++)
topology.add(new ArrayList());
//read graph
BufferedReader br = new BufferedReader(new FileReader(graphPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
double w = new Double(token.nextToken());
int src = nodeDict.get(from);
int dest = nodeDict.get(to);
Link link = new Link(src, dest);
if(!linkSet.contains(link) && src != dest){
linkSet.add(link);
topology.get(src).add(new Pair(dest, w));
topology.get(dest).add(new Pair(src, w));
links += 2;
m2 += 2;
}
str = br.readLine();
}
br.close();
//read community structure
int comId = 0;
ArrayList<Integer> n2c = new ArrayList();
for(int i = 0; i < nodes; i++)
n2c.add(-1);
br = new BufferedReader(new FileReader(comPath));
str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = nodeDict.get(token.nextToken());
n2c.set(nodeId, comId);
}
comId++;
str = br.readLine();
}
//Compute modularity
ArrayList<Double> in = new ArrayList();
ArrayList<Double> tot = new ArrayList();
for(int i = 0; i < comId; i++){
in.add(0.0);
tot.add(0.0);
}
for(int i = 0; i < nodes; i++){
ArrayList<Pair> neighList = topology.get(i);
int src = i;
int srcCom = n2c.get(src);
for(int j = 0; j < neighList.size(); j++){
Pair p = neighList.get(j);
int dest = p.key;
int destCom = n2c.get(dest);
if(srcCom == destCom)
in.set(srcCom, in.get(srcCom) + 1);
}
tot.set(srcCom, tot.get(srcCom) + neighList.size());
}
int nonEmptyCommunities = 0;
double mod = 0;
for(int i = 0; i < in.size(); i++){
if(tot.get(i) != 0){
nonEmptyCommunities++;
mod += in.get(i) / m2 - Math.pow(tot.get(i) / m2, 2);
}
else{
System.out.println("Empty community: " + i);
}
}
System.out.println("Nodes: " + nodes + " Links: " + links + " Communities: " + nonEmptyCommunities + "/" + in.size() + " Modularity: " + mod);
HashMap resultMap = new HashMap();
resultMap.put("nodes", nodes);
resultMap.put("links", links);
resultMap.put("communities", in.size());
resultMap.put("modularity", mod);
return resultMap;
}
}
| 27,325 | 32.446756 | 150 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/comm/CommFileUtil.java | package org.dzhuang.dynamic.comm;
import java.util.*;
import java.io.*;
import org.dzhuang.dynamic.util.Parameter;
import org.dzhuang.dynamic.util.Utility;
import org.dzhuang.dynamic.util.FileUtil;
public class CommFileUtil {
/**
* transform the community file from hierarchical format to format 1
* input format: nodeId(\t)type(\t)parentId
* output format (format 1): nodeId(\t)commId
* @param inputPath
* @param outputPath
*/
public static void hToN2c(String inputPath, String outputPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(new File(inputPath)));
HashMap<Integer, Integer> commMap = new HashMap();
ArrayList<Integer> idList = new ArrayList();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t, ");
int id = new Integer(token.nextToken());
String type = token.nextToken();
if(type.equals("node")){ //store all the non-community nodes
idList.add(id);
}
			int pId = new Integer(token.nextToken()); //read the parent (community) id of this entry
commMap.put(id, pId);
str = br.readLine();
}
br.close();
//write the community file in format 1
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
int commId = 1; // renumber the community id
HashMap<Integer, Integer> idMap = new HashMap();
for(int i = 0; i < idList.size(); i++){
int id = idList.get(i); // get a node id
int pId = id;
while(commMap.get(pId) != -1) //find its top parent id, when pId is a top parent id, we have commMap.get(pId) == -1
pId = commMap.get(pId);
if(!idMap.containsKey(pId)){
idMap.put(pId, commId++);
}
bw.write(id + "\t" + idMap.get(pId) + "\r\n");
}
bw.close();
}
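	// Worked example (hypothetical input): for a hierarchical file with the lines
	//   0	node	3
	//   1	node	3
	//   2	node	4
	//   3	comm	-1
	//   4	comm	-1
	// nodes 0 and 1 climb to top-level parent 3 (renumbered as community 1) and node 2 climbs to
	// parent 4 (community 2), so the output in format 1 is "0	1", "1	1", "2	2".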
/**
* transform the community file from format 1 to format 2
* input format (format 1): nodeId(\t)commId
* output format (format 2): nodeId1 nodeId2 nodeId3 ... nodeIdk
* For format 2, each line contains the ids of nodes in one community
* @param inputPath
* @param outputPath
*/
public static void n2cToC2n(String inputPath, String outputPath) throws Exception{
TreeMap<Integer, ArrayList<Integer>> commToNode = new TreeMap();
BufferedReader br = new BufferedReader(new FileReader(inputPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int nodeId = new Integer(token.nextToken());
int commId = new Integer(token.nextToken());
if(!commToNode.containsKey(commId)){
commToNode.put(commId, new ArrayList());
}
commToNode.get(commId).add(nodeId);
str = br.readLine();
}
br.close();
//write the community file in format 2
ArrayList<Integer> sizeList = new ArrayList();
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
Iterator<Integer> it = commToNode.keySet().iterator();
while(it.hasNext()){
int commId = it.next();
ArrayList nodeList = commToNode.get(commId);
String nodeStr = "";
for(int i = 0; i < nodeList.size(); i++){
nodeStr += nodeList.get(i) + "\t";
}
bw.write(nodeStr + "\r\n");
sizeList.add(nodeList.size());
}
bw.close();
}
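	// Worked example (hypothetical input): a format-1 file with the lines "1	1", "2	1", "3	2"
	// produces a format-2 file with two lines -- "1	2" (community 1) and "3" (community 2) --
	// i.e. one community per line with its nodes tab-separated.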
/**
* remove the nodes of which its community size is smaller than minSize
* @param commPath
* @param dictName
* @param minSize
* @return
*/
public static HashSet<String> getRemoveNodeSet(String commPath, int minSize) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
HashSet<String> nodeSet = new HashSet();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
HashSet<String> tmpSet = new HashSet();
while(token.hasMoreTokens()){
tmpSet.add(token.nextToken());
}
if(tmpSet.size() < minSize)
nodeSet.addAll(tmpSet);
str = br.readLine();
}
br.close();
return nodeSet;
}
}
| 3,839 | 31.268908 | 119 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/comm/NMI.java | package org.dzhuang.dynamic.comm;
import java.util.*;
import java.io.*;
import org.dzhuang.dynamic.util.Parameter;
import org.dzhuang.dynamic.util.Utility;
public class NMI {
public static void main(String args[]) throws Exception{
double nmi = getNMI(Parameter.ROOT_PATH + "/enron/enron_comm_inc_04.txt",
Parameter.ROOT_PATH + "/enron/enron_comm_inc_05.txt");
System.out.println("NMI: " + nmi);
}
public static double getNMI_Old(String commFilePath1, String commFilePath2) throws Exception{
double nmi = 0;
HashMap<String, Integer> nodeDict = new HashMap(); //the mapping of node label to integer ID
//read the first partition
BufferedReader br = new BufferedReader(new FileReader(commFilePath1));
ArrayList<HashSet<Integer>> comm1 = new ArrayList();
int nodeId = 1;
String str = br.readLine();
while(str != null){
comm1.add(new HashSet());
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
String nodeLabel = token.nextToken();
if(!nodeDict.containsKey(nodeLabel)){
nodeDict.put(nodeLabel, nodeId++);
}
int node = nodeDict.get(nodeLabel);
comm1.get(comm1.size()-1).add(node);
}
str =br.readLine();
}
br.close();
//read the second partition
br = new BufferedReader(new FileReader(commFilePath2));
ArrayList<HashSet<Integer>> comm2 = new ArrayList();
str = br.readLine();
while(str != null){
comm2.add(new HashSet());
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
String nodeLabel = token.nextToken();
if(!nodeDict.containsKey(nodeLabel)){
nodeDict.put(nodeLabel, nodeId++);
}
int node = nodeDict.get(nodeLabel);
comm2.get(comm2.size()-1).add(node);
}
str =br.readLine();
}
br.close();
//Compute the matrix N
double N[][] = new double[comm1.size()][comm2.size()]; // the matrix N
double rowSum[] = new double[comm1.size()]; //the row sum of the matrix
double colSum[] = new double[comm2.size()]; // the col sum of the matrix
double sum = 0; //the sum of the matrix
for(int i = 0; i < comm1.size(); i++){
rowSum[i] = 0;
HashSet<Integer> set1 = comm1.get(i);
for(int j = 0; j < comm2.size(); j++){
if(i == 0)
colSum[j] = 0;
HashSet<Integer> set2 = comm2.get(j);
int commNum = Utility.getCommNum(set1, set2);
N[i][j] = commNum;
rowSum[i] += commNum;
colSum[j] += commNum;
sum += commNum;
}
}
//Compute the normalized mutual information
double part1 = 0, part2 = 0, part3 = 0; // the three parts of the NMI
//compute part 1
for(int i = 0; i < N.length; i++){
for(int j = 0; j < N[i].length; j++){
if(N[i][j] > 0)
part1 += -2 * N[i][j] * Math.log(N[i][j] * sum / (rowSum[i] * colSum[j]));
}
}
// compute part2
for(int i = 0; i < N.length; i++){
if(rowSum[i] > 0)
part2 += rowSum[i] * Math.log(rowSum[i] / sum);
}
//compute part 3
for(int j = 0; j < N[0].length; j++){
if(colSum[j] > 0)
part3 += colSum[j] * Math.log(colSum[j] / sum);
}
//compute the nmi
nmi = part1 / (part2 + part3);
return nmi;
}
public static double getNMI(String commFilePath1, String commFilePath2) throws Exception{
double nmi = 0;
HashMap<String, Integer> nodeDict = new HashMap(); //the mapping of node label to integer ID
//read the first partition
BufferedReader br = new BufferedReader(new FileReader(commFilePath1));
ArrayList<ArrayList<Integer>> comm1 = new ArrayList();
int nodeId = 1;
String str = br.readLine();
while(str != null){
ArrayList<Integer> nodeList = new ArrayList();
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
String nodeLabel = token.nextToken();
if(!nodeDict.containsKey(nodeLabel)){
nodeDict.put(nodeLabel, nodeId++);
}
int node = nodeDict.get(nodeLabel);
Utility.insertIntoList(nodeList, node);
}
comm1.add(nodeList);
str =br.readLine();
}
br.close();
//read the second partition
br = new BufferedReader(new FileReader(commFilePath2));
ArrayList<ArrayList<Integer>> comm2 = new ArrayList();
str = br.readLine();
while(str != null){
ArrayList<Integer> nodeList = new ArrayList();
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
String nodeLabel = token.nextToken();
if(!nodeDict.containsKey(nodeLabel)){
nodeDict.put(nodeLabel, nodeId++);
}
int node = nodeDict.get(nodeLabel);
Utility.insertIntoList(nodeList, node);
}
comm2.add(nodeList);
str =br.readLine();
}
br.close();
//Compute the matrix N
double N[][] = new double[comm1.size()][comm2.size()]; // the matrix N
double rowSum[] = new double[comm1.size()]; //the row sum of the matrix
double colSum[] = new double[comm2.size()]; // the col sum of the matrix
double sum = 0; //the sum of the matrix
for(int i = 0; i < comm1.size(); i++){
rowSum[i] = 0;
ArrayList<Integer> list1 = comm1.get(i);
for(int j = 0; j < comm2.size(); j++){
if(i == 0)
colSum[j] = 0;
ArrayList<Integer> list2 = comm2.get(j);
int commNum = Utility.getCommNum(list1, list2);
N[i][j] = commNum;
rowSum[i] += commNum;
colSum[j] += commNum;
sum += commNum;
}
}
//Compute the normalized mutual information
double part1 = 0, part2 = 0, part3 = 0; // the three parts of the NMI
//compute part 1
for(int i = 0; i < N.length; i++){
for(int j = 0; j < N[i].length; j++){
if(N[i][j] > 0)
part1 += -2 * N[i][j] * Math.log(N[i][j] * sum / (rowSum[i] * colSum[j]));
}
}
// compute part2
for(int i = 0; i < N.length; i++){
if(rowSum[i] > 0)
part2 += rowSum[i] * Math.log(rowSum[i] / sum);
}
//compute part 3
for(int j = 0; j < N[0].length; j++){
if(colSum[j] > 0)
part3 += colSum[j] * Math.log(colSum[j] / sum);
}
//compute the nmi
nmi = part1 / (part2 + part3);
return nmi;
}
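	// The quantity computed above appears to be the normalized mutual information of Danon et al.:
	//   NMI = -2 * sum_ij N_ij * ln( N_ij * N / (N_i. * N_.j) )
	//         / ( sum_i N_i. * ln(N_i. / N) + sum_j N_.j * ln(N_.j / N) )
	// where N_ij counts the nodes shared by community i of the first partition and community j of
	// the second, N_i. and N_.j are the row/column sums and N is the grand total.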
}
| 5,970 | 29.620513 | 95 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/comm/CommFileFormat.java | package org.dzhuang.dynamic.comm;
public class CommFileFormat {
public static final int HIERARCHICAL = 0;
public static final int NODE_TO_COMM = 1;
public static final int COMM_TO_NODE = 2;
}
| 197 | 21 | 42 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Link.java | package org.dzhuang.dynamic.graph;
public class Link implements Comparable<Link>{
public int src;
public int dest;
public Link(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
}
public int compareTo(Link o){
Link e = (Link)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
| 519 | 12.684211 | 46 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Edge.java | package org.dzhuang.dynamic.graph;
import java.util.*;
public class Edge implements Comparable{
public static void main(String args[]){
TreeSet<Edge> edgeSet = new TreeSet();
edgeSet.add(new Edge(2,1));
System.out.println(edgeSet.contains(new Edge(1,2)));
}
public int src;
public int dest;
public double weight;
public Edge(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
this.weight = 1;
}
public Edge(int src, int dest, double weight){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
this.weight = weight;
}
public int compareTo(Object o){
Edge e = (Edge)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
| 941 | 14.7 | 54 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Pair.java | package org.dzhuang.dynamic.graph;
public class Pair implements Comparable<Pair>{
public int key;
public double value;
public Pair(int key, double value){
this.key = key;
this.value = value;
}
public int compareTo(Pair t){
if(this.value > t.value)
return 1;
else if(this.value < t.value)
return -1;
else return 0;
}
} | 344 | 17.157895 | 46 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Clique3.java | package org.dzhuang.dynamic.graph;
public class Clique3 {
public int nodes[];
public Clique3(int n1, int n2, int n3){
nodes = new int[]{n1, n2, n3};
}
public static boolean IsConnected(Clique3 c1, Clique3 c2){
boolean connected = false;
int counter = 0;
for(int i = 0; i < 3; i++){
for(int j = 0; j < 3; j++){
if(c1.nodes[i] == c2.nodes[j])
counter++;
}
}
if(counter == 2)
connected = true;
return connected;
}
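	// Illustrative example (hypothetical cliques): {1,2,3} and {2,3,4} share exactly two nodes
	// (an edge), so IsConnected returns true; {1,2,3} and {3,4,5} share only one node, so it
	// returns false.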
public boolean equals(Clique3 clique){
boolean equal = false;
int counter = 0;
for(int i = 0; i < 3; i++){
for(int j = 0; j < 3; j++){
if(nodes[i] == clique.nodes[j])
counter++;
}
}
if(counter == 3)
equal = true;
return equal;
}
}
| 721 | 17.05 | 59 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/PointStr.java | package org.dzhuang.dynamic.graph;
import org.dzhuang.dynamic.util.Parameter;
public class PointStr {
String i;
String j;
public PointStr(String i, String j){
this.i = i;
this.j = j;
}
public boolean equals(Object o){
PointStr p = (PointStr)o;
		if(i.equals(p.i) && j.equals(p.j))
return true;
else
return false;
}
public int hashCode(){
return i.hashCode()*Parameter.HASH_BASE + j.hashCode();
}
public String getI() {
return i;
}
public void setI(String i) {
this.i = i;
}
public String getJ() {
return j;
}
public void setJ(String j) {
this.j = j;
}
}
| 597 | 13.585366 | 57 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Data.java | package org.dzhuang.dynamic.graph;
public class Data implements Comparable{
public String from;
public String to;
public long timestamp;
public Data(String from, String to, long timestamp){
this.from = from;
this.to = to;
this.timestamp = timestamp;
}
public int compareTo(Object o){
Data data = (Data)o;
if(this.timestamp < data.timestamp)
return -1;
else if(this.timestamp > data.timestamp)
return 1;
else{
if(this.from.compareTo(data.from) < 0)
return -1;
else if(this.from.compareTo(data.from) > 0)
return 1;
else{
return this.to.compareTo(data.to);
}
}
}
public String toString(){
return from + "\t" + to + "\t" + timestamp;
}
}
| 696 | 19.5 | 53 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/DirectedEdge.java | package org.dzhuang.dynamic.graph;
import java.util.*;
public class DirectedEdge implements Comparable{
public int src;
public int dest;
public double weight;
public DirectedEdge(int src, int dest){
this.src = src;
this.dest = dest;
this.weight = 1;
}
public DirectedEdge(int src, int dest, double weight){
this.src = src;
this.dest = dest;
this.weight = weight;
}
public int compareTo(Object o){
		DirectedEdge e = (DirectedEdge)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
| 642 | 14.309524 | 55 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/Point.java | package org.dzhuang.dynamic.graph;
import org.dzhuang.dynamic.util.Parameter;
public class Point {
public Point(int i, int j){
this.i = i;
this.j = j;
}
int i;
int j;
public boolean equals(Object o){
Point p = (Point)o;
if(i == p.i && j == p.j)
return true;
else
return false;
}
public int hashCode(){
return i*Parameter.HASH_BASE + j;
}
public int getI() {
return i;
}
public void setI(int i) {
this.i = i;
}
public int getJ() {
return j;
}
public void setJ(int j) {
this.j = j;
}
}
| 539 | 12.170732 | 42 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/graph/LabelEdge.java | package org.dzhuang.dynamic.graph;
import java.util.*;
public class LabelEdge implements Comparable{
public String src;
public String dest;
public double weight;
public LabelEdge(String src, String dest){
if(src.compareTo(dest) < 0){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
this.weight = 1;
}
public LabelEdge(String src, String dest, double weight){
if(src.compareTo(dest) < 0){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
this.weight = weight;
}
public int compareTo(Object o){
LabelEdge e = (LabelEdge)o;
if(src.compareTo(e.src) < 0){
return -1;
}
else if(src.compareTo(e.src) > 0){
return 1;
}
else{
if(dest.compareTo(e.dest) < 0)
return -1;
else if(dest.compareTo(e.dest) > 0)
return 1;
else
return 0;
}
}
public String toString(){
return src + "\t" + dest + "\t" + weight;
}
}
| 966 | 15.672414 | 58 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/LearnIncLr.java | package org.dzhuang.dynamic.OtherAlgorithms;
import java.util.*;
import java.io.*;
import java.text.*;
import org.dzhuang.dynamic.comm.NMI;
import org.dzhuang.dynamic.graph.*;
import org.dzhuang.dynamic.util.*;
import toolbox.lr.*;
public class LearnIncLr {
ArrayList<Double> neighWeight; //the weight from node u to its neighbor communities
ArrayList<Integer> neighPos; //the index of node u's neighbor communities
double neighConnMax; //the maximum connections from u to its neighbor communities
int neighLast;
public Graph g; //the graph
public ComGraph cg; // The community graph (each node represents a community)
public Param param; // the parameters generated by the Logistic Regression Model
public int size; //the number of communities, during iterations, there may be empty communities
	ArrayList<Integer> n2c;	// the community membership of each node (node-to-community mapping)
ArrayList<Double> n2cIn; //the connections from each node to its current community
ArrayList<Double> in, tot; //the inner and total degree of the communities
public double runTime;
public double L1 = 0.5;
public int MAX_MERGE_SIZE = 8;
//The empty constructor
public LearnIncLr(){}
/**
* Initialize the heuristic incremental algorithm
* @param graphPath
* @param comPath
* @param param - the parameters generated by the Logistic Regression Model
* @param comParam - the comParam is used to classify community nodes, while param is used to classify nodes
* @throws Exception
*/
public void init2(String graphPath, String comPath) throws Exception{
readGraph(graphPath);
readCommunity(comPath);
initComGraph();
}
public void init(String graphPath, String comPath, Param param) throws Exception{
this.param = param;
readGraph(graphPath);
readCommunity(comPath);
initComGraph();
}
public void init(String graphPath, String comPath) throws Exception{
System.out.println("Initializing...");
readGraph(graphPath);
System.out.println("Graph read! Nodes: " + g.nbNodes + " Edges: " + g.totalWeight/2);
readCommunity(comPath);
}
public HashMap increase(String incPath, int maxPoints, String commOutPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
this.writeCommunity(FileUtil.extendFileName(commOutPath, "_" + (point+1)));
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
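	// Sketch of the control flow above (editor's summary, not original documentation): each
	// increment file <incPath>_k is read, split into month-sized batches by readNextBatch, and
	// every batch of changed links (deltaG) is folded into the existing partition via
	// updateCommunityStructure; modularity, runtime and community count are then recorded for
	// time point k.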
/**
* Run the incremental algorithm without outputting the community structure
* @param incPath
* @param maxPoints
* @param baseComPath
* @return
* @throws Exception
*/
public HashMap increaseNoComOutput(String incPath, int maxPoints, String dataSet) throws Exception{
long t0_1 = System.currentTimeMillis();
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Double> modList = new ArrayList();
ArrayList<Long> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
long t0_2 = System.currentTimeMillis();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
//TODO
this.param=LearnIncLr.readParam("data2/"+dataSet+"/"+dataSet+"_param_LR_"+point+".txt");
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
double mod = modularity();
int communities = nonEmptyCommunities();
String tmpComPath = "comm.tmp";
writeCommunity(tmpComPath);
this.writeCommunity("data2/"+dataSet+"/"+dataSet+"_LearnIncLr_community_"+(point+1)+".txt");
modList.add(mod);
comList.add(communities);
FileUtil.deleteFile(tmpComPath);
long t2= System.currentTimeMillis();
long time = t2-t1+t0_2-t0_1;
timeList.add(time);
System.out.println("Q" + (point+1) + ": " + mod + " Time: " + time + " Communities: " + communities);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
public HashMap increasePeriod(String incPath, int periodMonth, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
ArrayList<Data> dataList = new ArrayList();
for(int point = 0; point < 10000; point++){
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists()){
if(dataList.size() > 0){
TreeMap<Link, Double> deltaG = new TreeMap();
readBatch(deltaG, dataList, 0, periodMonth);
dataList = new ArrayList();
long t1 = System.currentTimeMillis();
updateCommunityStructure(deltaG);
long t2 = System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
}
break;
}
dataList.addAll(FileUtil.readData(incFile.getAbsolutePath()));
if((point+1) % periodMonth == 0){
TreeMap<Link, Double> deltaG = new TreeMap();
readBatch(deltaG, dataList, 0, periodMonth);
dataList = new ArrayList();
long t1 = System.currentTimeMillis();
updateCommunityStructure(deltaG);
long t2 = System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
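/**
 * Run the incremental algorithm starting from the data point initPoint, processing every
 * following increment file month by month and recording modularity, run time, number of
 * communities and the NMI against the communities in baseComPath.
 */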
public HashMap increaseInitial(String incPath, int initPoint, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = initPoint; point < 10000; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into deltaG
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public static Param readParam(String paramPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = str.substring(str.indexOf('=')+1);
StringTokenizer token = new StringTokenizer(str, "[, ];");
int paramNum = token.countTokens();
Param param = new Param(paramNum, 0);
for(int i = 0; i < paramNum;i++){
param.data[i] = new Double(token.nextToken());
}
br.close();
return param;
}
public static double readPrecision(String paramPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = br.readLine();
str = str.substring(str.indexOf('=')+1, str.lastIndexOf(';'));
double precision = new Double(str);
br.close();
return precision;
}
public static double readRecall(String paramPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = br.readLine();
str = br.readLine();
str = str.substring(str.indexOf('=')+1, str.lastIndexOf(';'));
double recall = new Double(str);
br.close();
return recall;
}
public void readGraph(String graphPath) throws Exception{
this.g = new Graph(graphPath);
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
n2cIn = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for(int i = 0; i < size; i++){
neighWeight.add(new Double(-1.0));
neighPos.add(new Integer(-1));
}
neighLast = 0;
n2c.ensureCapacity(size);
n2cIn.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
//initialize
for(int i = 0; i < size; i++){
n2c.add(i);
n2cIn.add(0.0);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
}
public void readCommunity(String commPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = g.nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
}
commId++;
str = br.readLine();
}
br.close();
// update the tot and in of the community structure
for(int i = 0; i < size; i++){
tot.set(i, 0.0);
in.set(i, 0.0);
}
for(int i = 0; i < g.nbNodes; i++){
int srcCom = n2c.get(i);
ArrayList<Pair> neighList = g.topology.get(i);
for(int j = 0; j < neighList.size(); j++){
Pair p = neighList.get(j);
int dest = p.key;
int destCom = n2c.get(dest);
double w = p.value;
if(srcCom == destCom){ //if i and dest are in the same community
n2cIn.set(i, n2cIn.get(i) + w);
in.set(srcCom, in.get(srcCom) + w); //update in value of this community
}
tot.set(srcCom, tot.get(srcCom) + w); //update the tot value of community C(i)
}
}
}
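/**
 * Build the community-level graph (cg) from the current node-level partition n2c:
 * each community becomes a ComNode whose size counts its member nodes, whose inK
 * accumulates the weight of intra-community links, whose totK accumulates the total
 * weighted degree of its members, and whose neighbors map stores the connection weight
 * to every adjacent community. cg.totalWeight ends up equal to 2m, since every
 * undirected link is visited from both of its endpoints.
 */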
public void initComGraph(){
cg = new ComGraph();
for(int i = 0; i < g.topology.size(); i++){
int srcCom = n2c.get(i);
if(!cg.topology.containsKey(srcCom))
cg.topology.put(srcCom, new ComNode());
ComNode srcNode = cg.topology.get(srcCom);
srcNode.size++;
ArrayList<Pair> neighborList = g.topology.get(i);
for(int j = 0; j < neighborList.size(); j++){
Pair p = neighborList.get(j);
int destCom = n2c.get(p.key);
double w = p.value;
if(srcCom == destCom){ //if i and j are in the same community
srcNode.inK += w;
}
srcNode.totK += w;
if(!srcNode.neighbors.containsKey(destCom)){
srcNode.neighbors.put(destCom, w);
}
else{
srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + w);
}
cg.totalWeight += w;
}
}
cg.nbNodes = cg.topology.size();
}
//read the next batch of data and put it into a change graph represented by deltaG
public int readNextBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start, int periodType) throws Exception{
int end = start;
long startTime = dataList.get(start).timestamp;
long endTime = startTime + 1; //default granularity: one second
if(periodType == FileUtil.BY_MINUTE)
endTime = startTime + 60;
else if(periodType == FileUtil.BY_HOUR)
endTime = startTime + 3600;
else if(periodType == FileUtil.BY_DAY)
endTime = startTime + 24 * 3600;
else if(periodType == FileUtil.BY_WEEK)
endTime = startTime + 7 * 24 * 3600;
else if(periodType == FileUtil.BY_MONTH)
endTime = startTime + 31 * 24 * 3600;
else if(periodType == FileUtil.BY_TWO_MONTH)
endTime = startTime + 62 * 24 * 3600;
else if(periodType == FileUtil.BY_YEAR)
endTime = startTime + 365 * 24 * 3600;
//parse the data
for(end = start; end < dataList.size(); end++){
Data data = dataList.get(end);
if(data.timestamp >= endTime)
break;
if(!g.nodeDict.containsKey(data.from))
g.nodeDict.put(data.from, g.nodeDict.size());
if(!g.nodeDict.containsKey(data.to))
g.nodeDict.put(data.to, g.nodeDict.size());
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
continue;
}
deltaG.put(link, 1.0);
}
return end;
}
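/**
 * Read a batch of incremental data covering periodMonth months (starting from the
 * timestamp of dataList.get(start)) into the change graph deltaG. Records whose link
 * already exists between two previously known nodes are skipped. Returns the index of
 * the first unread record, or -1 if the remaining data do not cover a full period.
 */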
public int readBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start, int periodMonth) throws Exception{
int end = start;
long startTime = dataList.get(start).timestamp;
long endTime = DateUtil.nextKMonth(startTime, periodMonth);
//parse the data
for(end = start; end < dataList.size(); end++){
Data data = dataList.get(end);
if(data.timestamp >= endTime)
break;
if(!g.nodeDict.containsKey(data.from))
g.nodeDict.put(data.from, g.nodeDict.size());
if(!g.nodeDict.containsKey(data.to))
g.nodeDict.put(data.to, g.nodeDict.size());
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
continue;
}
deltaG.put(link, 1.0);
}
if(end == dataList.size() && dataList.get(end-1).timestamp < endTime) //if the final batch of data is incomplete
end = -1;
return end;
}
/**
* update the community structure according to the change of the graph
* @param deltaG - the change of graph
* @throws Exception
*/
public void updateCommunityStructure(TreeMap<Link, Double> deltaG) throws Exception{
//Firstly extend the capacity of the Graph and Community
int oldNbNodes = g.nbNodes; // oldNbNodes is used to identify the newly added nodes
while(size < g.nodeDict.size()){
cg.nbNodes++;
ComNode comNode = new ComNode();
comNode.size = 1;
cg.topology.put(size, comNode);
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(n2c.size());
n2cIn.add(0.0);
in.add(0.0);
tot.add(0.0);
g.topology.add(new ArrayList<Pair>());
g.nbNodes++;
size++;
}
//read the change part of the graph from deltaG
// we put links into an array because we will use them again
Link links[] = (Link []) deltaG.keySet().toArray(new Link[deltaG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
//update the graph topology
g.linkMap.put(new Link(link.src, link.dest), w);
g.topology.get(link.src).add(new Pair(link.dest, w));
g.nbLinks++;
g.totalWeight += w;
if(link.src != link.dest){
g.topology.get(link.dest).add(new Pair(link.src, w));
g.nbLinks++;
g.totalWeight += w;
}
}
// initialize the community structure by putting every new node into a singleton community
TreeSet<Integer> nodeToUpdate = new TreeSet();
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
ComNode srcNode = cg.topology.get(srcCom);
ComNode destNode = cg.topology.get(destCom);
if(srcCom == destCom){
in.set(srcCom, in.get(srcCom) + 2*w);
srcNode.inK += 2*w;
n2cIn.set(link.src, n2cIn.get(link.src) + w);
n2cIn.set(link.dest, n2cIn.get(link.dest) + w);
}
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + 1*w);
srcNode.totK += w;
destNode.totK += w;
if(srcNode.neighbors.containsKey(destCom)){
srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + 1);
destNode.neighbors.put(srcCom, destNode.neighbors.get(srcCom) + 1);
}
else{
srcNode.neighbors.put(destCom, 1.0);
destNode.neighbors.put(srcCom, 1.0);
}
cg.totalWeight += 2*w;
nodeToUpdate.add(link.src);
nodeToUpdate.add(link.dest);
}
//collect the nodes touched by deltaG whose logistic regression prediction is positive, i.e. the candidates to be moved
int totalMove = 0;
ArrayList<Integer> nodeList = new ArrayList();
Iterator<Integer> it = nodeToUpdate.iterator();
while(it.hasNext()){
int node = it.next();
Sample sample = getNodeSample(node);
if(sample.type == SampleType.POSITIVE)
nodeList.add(node);
}
//System.out.println("Move node: " + nodeList.size() + "/" + nodeToUpdate.size() + " new: " + (g.nbNodes-oldNbNodes));
while(nodeList.size() > 0){
//System.out.println("Node move: " + nodeList.size());
totalMove += nodeList.size();
HashSet<Integer> nextSet = refine(nodeList); //the core step
nodeList.clear();
nodeList.addAll(nextSet);
}
//System.out.println("Total to move: " + totalMove);
//after the nodes are moved, we next move the communities
int totalMerge = 0;
HashMap<Integer, ArrayList<Integer>> c2n = getCommunityToNode();
ArrayList<Integer> comList = new ArrayList();
comList.addAll(c2n.keySet());
//System.out.println("Move com: " + comList.size() + "/" + c2n.size());
while(comList.size() > 0){
totalMerge += comList.size();
//System.out.println("Come move: " + comList.size());
HashSet<Integer> nextSet = refineCom(c2n, comList);
comList.clear();
comList.addAll(nextSet);
}
//System.out.println("Total to merge: " + totalMerge);
}
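/**
 * The local moving step: each node in nodeList is moved from its current community to the
 * neighbor community that yields the largest modularity gain (if any). Both the node-level
 * structure (n2c, n2cIn, in, tot) and the community graph cg are kept consistent during the
 * move. The neighbors of every moved node are re-evaluated with the logistic regression
 * model, and those predicted as positive are returned for the next round.
 */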
public HashSet<Integer> refine(ArrayList<Integer> nodeList){
//System.out.println("Node to move: " + nodeList.size());
HashSet<Integer> updateSet = new HashSet();
int nbMoves = 0;
//move node from its current community to the one which gives the maximum gain in modularity
for(int nodeTmp = 0; nodeTmp < nodeList.size(); nodeTmp++){
int node = nodeList.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for(int i = 0; i < neighLast; i++){
double increase = modularityGain(node, neighPos.get(i), neighWeight.get(neighPos.get(i)), wDegree);
if(increase > bestIncrease){
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(bestComm);
bestIncrease = increase;
}
}
//before insert node into bestComm, we should update the cg
if(bestComm != nodeComm){
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
//System.out.println(neighWeight.get(neighCom));
if(neighCom != nodeComm){ //first move node out of nodeComm
cg.increaseWeight(nodeComm, neighCom, -1 * neighWeight.get(neighCom));
}
}
ComNode comNode = cg.topology.get(nodeComm);
comNode.inK -= 2 * neighWeight.get(nodeComm);
if(comNode.neighbors.containsKey(nodeComm))
comNode.neighbors.put(nodeComm, comNode.neighbors.get(nodeComm) - 2 * neighWeight.get(nodeComm));
comNode.totK -= wDegree;
comNode.size--;
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != bestComm){ // secondly move node into bestComm
cg.increaseWeight(bestComm, neighCom, neighWeight.get(neighCom));
}
}
ComNode bestNode = cg.topology.get(bestComm);
bestNode.inK += 2 * neighWeight.get(bestComm);
if(bestNode.neighbors.containsKey(bestComm))
bestNode.neighbors.put(bestComm, bestNode.neighbors.get(bestComm) + 2 * neighWeight.get(bestComm));
else
bestNode.neighbors.put(bestComm, 2 * neighWeight.get(bestComm));
bestNode.totK += wDegree;
bestNode.size++;
//If the community is empty, remove it.
if(comNode.totK == 0){
cg.removeEmptyComm(nodeComm);
//System.out.println("Community removed!");
}
}
insert(node, bestComm, bestNbLinks);
n2cIn.set(node, bestNbLinks);
if(bestComm != nodeComm){
nbMoves++;
ArrayList<Pair> neighbors = g.topology.get(node);
for(int i = 0; i < neighbors.size(); i++){
Pair p = neighbors.get(i);
int neigh = p.key;
double w = p.value;
int neighCom = n2c.get(neigh);
if(neighCom == nodeComm)
n2cIn.set(neigh, n2cIn.get(neigh) - w);
else if(neighCom == bestComm)
n2cIn.set(neigh, n2cIn.get(neigh) + w);
Sample sample = getNodeSample(neigh);
if(sample.type == SampleType.POSITIVE)
updateSet.add(neigh);
else
updateSet.remove(neigh);
}
}
}
//System.out.println("nbMoves: " + nbMoves);
return updateSet;
}
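/**
 * The community merging step: every community in comList whose size does not exceed
 * MAX_MERGE_SIZE is merged into the neighbor community giving the largest modularity
 * increase (neighConn - totK*totK'/2m), analogous to the aggregation phase of the Louvain
 * method. Returns the set of merged target communities that are still small enough to be
 * considered again in the next round.
 */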
public HashSet<Integer> refineCom(HashMap<Integer, ArrayList<Integer>> c2n, ArrayList<Integer> comList){
HashSet<Integer> updateSet = new HashSet();
for(int comTmp = 0; comTmp < comList.size(); comTmp++){
int com = comList.get(comTmp);
ComNode node = cg.topology.get(com);
if(node.size > MAX_MERGE_SIZE){
continue;
}
double bestIncrease = 0;
int bestCom = com;
double bestConn = 0;
Iterator<Map.Entry<Integer, Double>> it = node.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int neighCom = entry.getKey();
if(neighCom == com)
continue;
double neighConn = entry.getValue();
ComNode neighNode = cg.topology.get(neighCom);
// if(neighNode.size > g.nbNodes/10)
// continue;
double increase = neighConn - node.totK * neighNode.totK / cg.totalWeight;
if(increase > bestIncrease){
bestIncrease = increase;
bestCom = neighCom;
bestConn = neighConn;
}
}
if(bestCom != com){
cg.mergeComm(com, bestCom, bestConn);
mergeCommunity(c2n, com, bestCom, bestConn);
if(cg.topology.get(bestCom).size <= MAX_MERGE_SIZE)
updateSet.add(bestCom); //put this updated community into update Set
updateSet.remove(com);
//System.out.println("Inc: " + Parameter.df.format(bestIncrease) + "Com merge: " + com + " -> " + bestCom);
}
}
return updateSet;
}
public ArrayList<Integer> rankNodesByLR(Param param){
ArrayList<Integer> orderList = new ArrayList();
for(int i = 0; i < g.nbNodes; i++){
Sample sample = getNodeSample(i);
if(sample.type == SampleType.POSITIVE)
orderList.add(i);
}
System.out.println("Nodes to be moved: " + orderList.size());
//orderList = Utility.randomOrderList(g.nbNodes);
return orderList;
}
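/**
 * Newman's modularity of the current partition:
 * Q = sum over communities c of [ in_c/2m - (tot_c/2m)^2 ],
 * where in_c is twice the weight of the intra-community links of c, tot_c is the total
 * weighted degree of the nodes in c, and 2m equals g.totalWeight (every link is counted
 * from both endpoints when the graph is built).
 */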
public double modularity(){
double q = 0;
double m2 = (double)g.totalWeight;
for(int i = 0; i < size; i++){
if(tot.get(i) > 0){
q += in.get(i)/m2 - Math.pow(tot.get(i).doubleValue()/m2, 2);
}
}
return q;
}
public int nonEmptyCommunities(){
TreeSet<Integer> comSet = new TreeSet();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
comSet.add(com);
}
return comSet.size();
}
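/**
 * Modularity gain obtained by inserting the (currently removed) node into comm.
 * The exact gain is dQ = dnc/m - tot_c*k/(2m^2) = (1/m)*(dnc - tot_c*k/2m); because this
 * value is only used to compare candidate communities of the same node, the constant
 * factor 1/m is dropped and the method returns dnc - tot_c*k/2m directly.
 */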
public double modularityGain(int node, int comm, double dnodecomm, double wDegree){
double totc = tot.get(comm).doubleValue(); //sum of degree of nodes in comm
double degc = wDegree; //degree of node
double m2 = g.totalWeight; //2*total weight of the network
double dnc = dnodecomm; //the connections from node to comm
return (dnc - totc*degc/m2);
}
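// remove() and insert() keep the community totals consistent when a node leaves or joins
// a community: tot changes by the node's weighted degree, and in changes by twice the
// weight of the links between the node and that community plus the node's self-loops.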
public void remove(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2*dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2*dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
//move node from its current community to destCom
public void move(int node, int destCom){
neighComm(node);
int srcCom = n2c.get(node);
double wDegree = g.weightedDegree(node);
remove(node, n2c.get(node), neighWeight.get(srcCom));
if(srcCom != destCom){
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != srcCom){ //first move node out of nodeComm
cg.increaseWeight(srcCom, neighCom, -1 * neighWeight.get(neighCom));
}
}
ComNode comNode = cg.topology.get(srcCom);
comNode.inK -= 2 * neighWeight.get(srcCom);
if(comNode.neighbors.containsKey(srcCom))
comNode.neighbors.put(srcCom, comNode.neighbors.get(srcCom) - 2 * neighWeight.get(srcCom));
comNode.totK -= wDegree;
comNode.size--;
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != destCom){ // secondly move node into bestComm
cg.increaseWeight(destCom, neighCom, neighWeight.get(neighCom));
}
}
ComNode bestNode = cg.topology.get(destCom);
bestNode.inK += 2 * neighWeight.get(destCom);
if(bestNode.neighbors.containsKey(destCom))
bestNode.neighbors.put(destCom, bestNode.neighbors.get(destCom) + 2 * neighWeight.get(destCom));
else
bestNode.neighbors.put(destCom, 2 * neighWeight.get(destCom));
bestNode.totK += wDegree;
bestNode.size++;
//If the community is empty, remove it.
if(comNode.totK == 0){
cg.removeEmptyComm(srcCom);
//System.out.println("Community removed!");
}
}
insert(node, destCom, neighWeight.get(destCom));
n2cIn.set(node, neighWeight.get(destCom));
//update n2cIn
ArrayList<Pair> neighbors = g.topology.get(node);
for(int i = 0; i < neighbors.size(); i++){
Pair p = neighbors.get(i);
int neigh = p.key;
double w = p.value;
int neighCom = n2c.get(neigh);
if(neighCom == srcCom)
n2cIn.set(neigh, n2cIn.get(neigh) - w);
else if(neighCom == destCom)
n2cIn.set(neigh, n2cIn.get(neigh) + w);
}
}
//create a new singleton community for the node
public int insertSingleton(int node){
double k = g.weightedDegree(node);
int commId = 0; //find a usable community id
while(tot.get(commId) > 0)
commId++;
tot.set(commId, k);
in.set(commId, 0.0);
n2c.set(node, commId);
return commId;
}
// generate the neighborhood communities of node
// this operation updates the lists neighWeight and neighPos
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
neighConnMax = 0;
ArrayList<Pair> neighList = g.topology.get(node);
int deg = g.nbNeighbors(node);
//System.out.println("node: " + node + " n2c: " + n2c.get(node));
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
int neigh = neighList.get(i).key;
int neighComm = n2c.get(neigh);
double neighW = neighList.get(i).value;
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
if(neighComm != neighPos.get(0) && neighWeight.get(neighComm) > neighConnMax)
neighConnMax = neighWeight.get(neighComm);
}
}
}
public HashMap<Integer, ArrayList<Integer>> getCommunityToNode(){
HashMap<Integer, ArrayList<Integer>> c2n = new HashMap();
for(int i = 0; i < g.nbNodes; i++){
int com = n2c.get(i);
if(!c2n.containsKey(com))
c2n.put(com, new ArrayList());
c2n.get(com).add(i);
}
return c2n;
}
//merge the community from srcCom to destCom
public void mergeCommunity(HashMap<Integer, ArrayList<Integer>> c2n, int srcCom, int destCom, double conn){
ArrayList<Integer> sList = c2n.get(srcCom);
ArrayList<Integer> dList = c2n.get(destCom);
//first update the n2cIn values of the nodes affected by the merge
for(int i = 0; i < sList.size(); i++){
int node = sList.get(i);
int com = n2c.get(node);
ArrayList<Pair> neighbors = g.topology.get(node);
for(int j = 0; j < neighbors.size(); j++){
Pair p = neighbors.get(j);
int neigh = p.key;
double w = p.value;
int neighCom = n2c.get(neigh);
if(neighCom == destCom){
n2cIn.set(node, n2cIn.get(node) + w);
n2cIn.set(neigh, n2cIn.get(neigh) + w);
}
}
}
for(int i = 0; i < sList.size(); i++){
n2c.set(sList.get(i), destCom);
dList.add(sList.get(i));
}
in.set(destCom, in.get(destCom) + in.get(srcCom) + 2*conn);
tot.set(destCom, tot.get(destCom) + tot.get(srcCom));
in.set(srcCom, 0.0);
tot.set(srcCom, 0.0);
sList.clear();
}
//get the sample of a node according to the Logistic Regression Model
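// The feature vector is {1, k, kIn}: an intercept, the (weighted) degree of the node and the
// weight of its links inside its current community; sample.toLogValue(1) is assumed to
// log-transform the features starting from index 1. A node is marked POSITIVE (i.e., a
// candidate to be moved) when the logistic regression probability reaches the threshold L1.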
public Sample getNodeSample(int node){
double k = g.weightedDegree(node);
double kIn = n2cIn.get(node);
Sample sample = new Sample(new double[]{1, k, kIn}, SampleType.NEGATIVE);
sample.toLogValue(1);
double prob = LogisticRegression.getLogisticValue(sample, param);
if(prob >= L1)
sample.type = SampleType.POSITIVE;
return sample;
}
public void outputCommunityStatistics(){
int comNum = 0, maxSize=0, minSize=1000000;
float avgSize = 0;
HashMap<Integer, Integer> sizeMap = new HashMap();
ArrayList<Integer> sizeList = new ArrayList();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> inList = new ArrayList();
ArrayList<Float> totList = new ArrayList();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!sizeMap.containsKey(com))
sizeMap.put(com, 0);
sizeMap.put(com, sizeMap.get(com) + 1);
}
Iterator<Integer> it = sizeMap.keySet().iterator();
double m2 = g.totalWeight;
while(it.hasNext()){
int com = it.next();
int size = sizeMap.get(com);
double mod = in.get(com)/m2 - Math.pow(tot.get(com).doubleValue()/m2, 2);
if(size > maxSize)
maxSize = size;
if(size < minSize)
minSize = size;
sizeList.add(size);
modList.add((float)(mod * m2));
inList.add((float)in.get(com).doubleValue());
totList.add((float)tot.get(com).doubleValue());
}
//sort the results by community size
int tmp1;
float tmp2;
for(int i = 0; i < sizeList.size()-1; i++){
for(int j = i+1; j < sizeList.size(); j++){
if(sizeList.get(i) > sizeList.get(j) || (sizeList.get(i) == sizeList.get(j) && totList.get(i) > totList.get(j))){
Utility.listSwap(sizeList, i, j);
Utility.listSwap(modList, i, j);
Utility.listSwap(inList, i, j);
Utility.listSwap(totList, i, j);
}
}
}
int com8 = 0, com5 = 0; //the number of communities that contain 80% and 50% of the nodes
int totalSize = 0;
for(int i = sizeList.size()-1; i>=0; i--){
totalSize += sizeList.get(i);
if((double)totalSize / g.nbNodes < 0.8)
com8++;
if((double) totalSize / g.nbNodes < 0.5)
com5++;
}
comNum = sizeMap.size();
avgSize = (float)g.nbNodes / comNum; //use floating-point division so the average size is not truncated
System.out.println("Modularity: " + (float)modularity() + " M2: " + g.totalWeight);
System.out.println("#Communities: " + comNum + " Average Size: " + avgSize + " Max Size: " + maxSize + " Min Size: " + minSize);
System.out.println("#Communities for 50% nodes: " + com5 + " #Communities for 80% nodes: " + com8);
// System.out.println("size=" + sizeList + ";");
// System.out.println("Qc=" + modList + ";");
// System.out.println("in=" + inList + ";");
// System.out.println("tot=" + totList + ";");
}
public void writeCommunity(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
HashMap<Integer, ArrayList<Integer>> comToNode = new HashMap();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!comToNode.containsKey(com))
comToNode.put(com, new ArrayList());
comToNode.get(com).add(i);
}
//write community
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<Integer> it = comToNode.keySet().iterator();
while(it.hasNext()){
int com = it.next();
ArrayList<Integer> nodeList = comToNode.get(com);
bw.write(revDict.get(nodeList.get(0)));
for(int i = 1; i < nodeList.size(); i++){
bw.write("\t" + revDict.get(nodeList.get(i)));
}
bw.write("\r\n");
}
bw.close();
}
public void writeGraph(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
TreeSet<LabelEdge> edgeSet = new TreeSet();
Iterator<Link> it = g.linkMap.keySet().iterator();
while(it.hasNext()){
Link link = it.next();
String from = revDict.get(link.src);
String to = revDict.get(link.dest);
LabelEdge edge = new LabelEdge(from, to);
edgeSet.add(edge);
}
//write graph
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<LabelEdge> it1 = edgeSet.iterator();
while(it1.hasNext()){
LabelEdge edge = it1.next();
bw.write(edge.src + "\t" + edge.dest + "\t1\r\n");
}
bw.close();
}
/**
* Local definition of a graph
* @author shangjiaxing
*
*/
public class Graph{
public HashMap<String, Integer> nodeDict; //mapping the node label (String) to node id (Integer)
public int nbNodes; //number of nodes
public int nbLinks; //number of edges;
public double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
public ArrayList<ArrayList<Pair>> topology; //The matrix of the graph, the neighbors of i is denoted as topology.get(i)
public TreeMap<Link, Double> linkMap;
public Graph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
topology = new ArrayList();
}
public Graph(String graphPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nbNodes = nodeDict.size();
topology = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
topology.ensureCapacity(nbNodes);
this.linkMap = new TreeMap();
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
nbLinks = 0;
totalWeight = 0;
String str = br.readLine().trim();
while(str != null && !str.equals("")){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
linkMap.put(new Link(src, dest), weight);
topology.get(src).add(new Pair(dest, weight));
nbLinks++;
totalWeight += weight; //to support weighted network
if(src != dest){
topology.get(dest).add(new Pair(src, weight));
nbLinks++;
totalWeight += weight;
}
str = br.readLine();
}
br.close();
}
// public double weightedDegree(int node){
// double wDegree = 0;
// ArrayList<Pair> neighList = topology.get(node);
// for(int i = 0; i < neighList.size(); i++){
// wDegree += neighList.get(i).second;
// }
// return wDegree;
// }
public double weightedDegree(int node){
return (double)g.topology.get(node).size();
}
public int nbNeighbors(int node){
return topology.get(node).size();
}
public double nbSelfLoops(int node){
ArrayList<Pair> neighList = topology.get(node);
for(int i = 0; i < neighList.size(); i++){
Pair p = neighList.get(i);
if(node == p.key)
return p.value;
}
return 0;
}
public int getNbNodes(){
return nbNodes;
}
public int getNbLinks(){
return nbLinks;
}
public ArrayList<ArrayList<Pair>> getTopology(){
return topology;
}
public double getTotalWeight(){
return totalWeight;
}
}
/**
* The community graph, where nodes represent communities and edges represent
* the connections among communities
* @author shangjiaxing
*
*/
public class ComGraph{
public int nbNodes;
public double totalWeight;
public HashMap<Integer, ComNode> topology;
public ComGraph(){
topology = new HashMap();
}
public double modularity(){
double q = 0;
double m2 = totalWeight;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
ComNode node = entry.getValue();
if(node.totK > 0)
q += node.inK / m2 - Math.pow(node.totK/m2, 2);
}
return q;
}
public double modularity2(){
double q = 0;
double m2 = totalWeight;
double total = 0;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
int com = entry.getKey();
double in = 0, tot = 0;
ComNode node = entry.getValue();
Iterator<Map.Entry<Integer, Double>> subIt = node.neighbors.entrySet().iterator();
while(subIt.hasNext()){
Map.Entry<Integer, Double> subEntry = subIt.next();
int destCom = subEntry.getKey();
double w = subEntry.getValue();
if(com == destCom)
in += w;
tot += w;
total += w;
}
if(node.totK > 0)
q += in / m2 - Math.pow(tot/m2, 2);
}
//System.out.println("m2: " + m2 + " Total: " + total);
return q;
}
public void removeEmptyComm(int comId){
ComNode node = topology.get(comId);
Iterator<Map.Entry<Integer, Double>> it = node.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int destCom = entry.getKey();
if(destCom != comId){
topology.get(destCom).neighbors.remove(comId);
}
}
topology.remove(comId);
nbNodes--;
}
//merge the src community to the dest community
public void mergeComm(int srcCom, int destCom, double conn){
ComNode srcNode = topology.get(srcCom);
ComNode destNode = topology.get(destCom);
Iterator<Map.Entry<Integer, Double>> it = srcNode.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int neighCom = entry.getKey();
double neighW = entry.getValue();
if(neighCom != destCom && neighCom != srcCom){
increaseWeight(neighCom, destCom, neighW);
}
}
if(destNode.neighbors.containsKey(destCom))
destNode.neighbors.put(destCom, destNode.neighbors.get(destCom) + srcNode.inK + 2 * conn);
else destNode.neighbors.put(destCom, srcNode.inK + 2*conn);
destNode.inK += srcNode.inK + 2 * conn;
destNode.totK += srcNode.totK;
destNode.size += srcNode.size;
removeEmptyComm(srcCom);
}
public void increaseWeight(int srcCom, int destCom, double deltaW){
ComNode srcNode = topology.get(srcCom);
ComNode destNode = topology.get(destCom);
if(!srcNode.neighbors.containsKey(destCom)){
srcNode.neighbors.put(destCom, 0.0);
destNode.neighbors.put(srcCom, 0.0);
}
srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + deltaW);
destNode.neighbors.put(srcCom, destNode.neighbors.get(srcCom) + deltaW);
}
public double getM2(){
double m2 = 0;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
ComNode node = entry.getValue();
Iterator<Map.Entry<Integer, Double>> subIt = node.neighbors.entrySet().iterator();
while(subIt.hasNext()){
Map.Entry<Integer, Double> subEntry = subIt.next();
double w = subEntry.getValue();
m2 += w;
}
}
return m2;
}
}
/**
* The community node
* @author shangjiaxing
*
*/
class ComNode{
double totK; //the total degree of the community
double inK; //the inner degree, i.e., self-loops of the community
double size; //the number of nodes in the community
HashMap<Integer, Double> neighbors; //the neighbor communities, where the key is the community ID and the value is the connections among the two communities
public ComNode(){
totK = 0;
inK = 0;
size = 0;
neighbors = new HashMap();
}
}
}
| 44,937 | 34.921663 | 159 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/BatchInc.java | /**
 * The Batch Incremental algorithm
*
*/
package org.dzhuang.dynamic.OtherAlgorithms;
import java.io.*;
import java.util.*;
import org.dzhuang.dynamic.util.*;
import org.dzhuang.dynamic.graph.*;
import org.dzhuang.dynamic.comm.NMI;
public class BatchInc {
public CommGraph g0;
public static void main(String args[]) throws Exception{
String dataset = "arXiv";
String dataset1 = dataset + "/" + dataset;
String graphPath = Parameter.ROOT_PATH + "/" + dataset1 + "_graph_0.txt";
String commPath = FileUtil.replaceFileName(graphPath, dataset + "_comm_0.txt");
BatchInc batchInc = new BatchInc();
batchInc.simpleTest();
//batchInc.initialize(graphPath, commPath);
System.out.println("Succeed!");
}
public void simpleTest() throws Exception{
String dataset = "arXiv";
String dataset1 = dataset + "/" + dataset;
String graphPath = Parameter.ROOT_PATH + "/" + dataset1 + "_graph_0.txt";
String commPath = FileUtil.replaceFileName(graphPath, dataset + "_comm_0.txt");
String incDataPath = Parameter.ROOT_PATH + "/" + dataset1 + "_inc_1.txt";
CommGraph g0 = new CommGraph(graphPath, commPath);
g0.increaseData(incDataPath);
CompressGraph g = new CompressGraph(g0);
Louvain louvain = new Louvain();
louvain.run(g, g0.n2c, 0.001);
}
public void initialize(String graphPath, String commPath) throws Exception{
g0 = new CommGraph(graphPath, commPath);
}
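/**
 * For each data point: apply the incremental edges to the community graph g0, compress it
 * into a CompressGraph, rerun the Louvain algorithm initialized with the previous partition,
 * and write the updated community structure to commOutPath. Modularity, accumulated run time
 * and the number of communities are collected for every point.
 */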
public HashMap increase(String incPath, int dataPoints, String commOutPath) throws Exception{
long t0_1 = System.currentTimeMillis();
HashMap<String, Integer> nodeDict = g0.nodeDict;
HashMap resultMap = new HashMap();
ArrayList<Double> modList = new ArrayList();
ArrayList<Long> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
long t0_2 = System.currentTimeMillis();
for(int i = 0; i < dataPoints; i++){
long t1 = System.currentTimeMillis();
System.out.println("Running: " + i);
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (i+1)));
String commPath = FileUtil.extendFileName(commOutPath, "_" + (i+1));
if(!incFile.exists())
break;
g0.increaseData(incFile.getAbsolutePath()); //apply the changes to the network
CompressGraph g = new CompressGraph(g0);
HashMap<Integer, Integer> comMap = new HashMap(); //the mapping of node ids between two iterations
HashMap<Integer, CommNode> commStruc = new HashMap(); //the hierarchical community structure
Louvain louvain = new Louvain().runAndExport(g, g0.n2c, nodeDict, comMap, commStruc, 0.001); // run the Louvain algorithm on the network
g0.updateAndWriteCommunity(commStruc, commPath);
modList.add(new Double(Parameter.df.format(louvain.modularity())));
comList.add(g0.commSizeMap.size());
long t2 = System.currentTimeMillis();
long time = t2-t1+t0_2-t0_1;
timeList.add(time);
System.out.println("Q" + (i+1) + ": " + louvain.modularity() + " Time: " + time
+ " Communities: " + g0.commSizeMap.size());
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
public HashMap increaseNoComOutput(String incPath, int dataPoints, String baseComPath) throws Exception{
HashMap<String, Integer> nodeDict = g0.nodeDict;
HashMap resultMap = new HashMap();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = 0; point < dataPoints; point++){
System.out.println("Running: " + point);
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
g0.increaseData(incFile.getAbsolutePath()); //apply the changes to the network
CompressGraph g = new CompressGraph(g0);
HashMap<Integer, Integer> comMap = new HashMap(); //the mapping of node ids between two iterations
HashMap<Integer, CommNode> commStruc = new HashMap(); //the hierarchical community structure
Louvain louvain = new Louvain().runAndExport(g, g0.n2c, nodeDict, comMap, commStruc, 0.001); // run the Louvain algorithm on the network
long t2 = System.currentTimeMillis();
float time = (float)(t2-t1) / 1000;
String tmpComPath = "comm.tmp";
g0.updateAndWriteCommunity(commStruc, tmpComPath);
double mod = louvain.modularity();
int communities = g0.commSizeMap.size();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public HashMap increasePeriod(String incPath, int periodMonth, String baseComPath) throws Exception{
HashMap<String, Integer> nodeDict = g0.nodeDict;
HashMap resultMap = new HashMap();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
boolean updated = false;
for(int point = 0; point < 1000; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists()){
if(!updated){
updated = true;
CompressGraph g = new CompressGraph(g0);
HashMap<Integer, Integer> comMap = new HashMap(); //the mapping of node ids between two iterations
HashMap<Integer, CommNode> commStruc = new HashMap(); //the hierarchical community structure
Louvain louvain = new Louvain().runAndExport(g, g0.n2c, nodeDict, comMap, commStruc, 0.001); // run the Louvain algorithm on the network
long t2 = System.currentTimeMillis();
float time = (float)(t2-t1) / 1000;
String tmpComPath = "comm.tmp";
g0.updateAndWriteCommunity(commStruc, tmpComPath);
double mod = louvain.modularity();
int communities = g0.commSizeMap.size();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point));
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
}
break;
}
g0.increaseData(incFile.getAbsolutePath()); //apply the changes to the network
updated = false;
if((point+1) % periodMonth == 0){
updated = true;
CompressGraph g = new CompressGraph(g0);
HashMap<Integer, Integer> comMap = new HashMap(); //the mapping of node ids between two iterations
HashMap<Integer, CommNode> commStruc = new HashMap(); //the hierarchical community structure
Louvain louvain = new Louvain().runAndExport(g, g0.n2c, nodeDict, comMap, commStruc, 0.001); // run the Louvain algorithm on the network
long t2 = System.currentTimeMillis();
float time = (float)(t2-t1) / 1000;
String tmpComPath = "comm.tmp";
g0.updateAndWriteCommunity(commStruc, tmpComPath);
double mod = louvain.modularity();
int communities = g0.commSizeMap.size();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
}
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public HashMap increaseInitial(String incPath, int initPoint, String baseComPath) throws Exception{
HashMap<String, Integer> nodeDict = g0.nodeDict;
HashMap resultMap = new HashMap();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = initPoint; point < 10000; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
g0.increaseData(incFile.getAbsolutePath()); //apply the changes to the network
CompressGraph g = new CompressGraph(g0);
HashMap<Integer, Integer> comMap = new HashMap(); //the mapping of node ids between two iterations
HashMap<Integer, CommNode> commStruc = new HashMap(); //the hierarchical community structure
Louvain louvain = new Louvain().runAndExport(g, g0.n2c, nodeDict, comMap, commStruc, 0.001); // run the Louvain algorithm on the network
long t2 = System.currentTimeMillis();
float time = (float)(t2-t1) / 1000;
String tmpComPath = "comm.tmp";
g0.updateAndWriteCommunity(commStruc, tmpComPath);
double mod = louvain.modularity();
int communities = g0.commSizeMap.size();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
/**
 * The Louvain algorithm, run on the compressed graph to re-partition the batch-updated network
* @author shangjiaxing
*
*/
class Louvain{
ArrayList<Double> neighWeight;
ArrayList<Integer> neighPos;
int neighLast;
public CompressGraph g;
public int size;
ArrayList<Integer> n2c;
ArrayList<Double> in, tot;
int nbPass;
double minModularity;
public double runTime;
public Louvain(){
}
public Louvain(CompressGraph g, double minModularity) throws Exception{
this.g = g;
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for(int i = 0; i < size; i++){
neighWeight.add(-1.0);
neighPos.add(-1);
}
neighLast = 0;
n2c.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
//initialize
for(int i = 0; i < size; i++){
n2c.add(i);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
this.minModularity = minModularity;
}
public void initCommunity(ArrayList<Integer> n2c){
for(int i = 0; i < n2c.size(); i++){
this.n2c.set(i, n2c.get(i));
}
}
/**
* Run the Louvain algorithm with an given initial community structure
* @param g
* @param n2c
* @param precision
* @throws Exception
*/
public void run(CompressGraph g, ArrayList<Integer> n2c, double precision) throws Exception{
System.out.println("Begin");
long t1 = System.currentTimeMillis();
Louvain com = new Louvain(g, precision);
com.initCommunity(n2c);
g = com.partition2Graph();
com = new Louvain(g, precision);
double mod = com.modularity();
boolean improvement = true;
double newMod;
int level = 0;
do{
System.out.println("Level:" + level + "\tNodes:" + com.g.nbNodes +
"\tEdges:" + com.g.nbLinks + "\t links.size():" + com.g.links.size() + "\tTotalWeight:" + com.g.totalWeight);
ArrayList<Integer> links = g.links;
ArrayList<Double> weights = g.weights;
level++;
improvement = com.oneLevel();
newMod = com.modularity();
g = com.partition2Graph();
com = new Louvain(g, precision);
System.out.println("mod increased from " + mod + " to " + newMod);
mod = newMod;
}while(improvement);
long t2 = System.currentTimeMillis();
double time = (double)(t2 - t1)/1000;
com.runTime = time;
System.out.println("Time:" + time + " seconds");
System.out.println("Succeed");
}
public Louvain runAndExport(CompressGraph g, ArrayList<Integer> n2c, HashMap<String, Integer> nodeDict,
HashMap<Integer, Integer> comMap, HashMap<Integer, CommNode> commStruc, double precision) throws Exception{
//System.out.println("Begin");
long t1 = System.currentTimeMillis();
Louvain com = new Louvain(g, precision);
com.initCommunity(n2c);
int cursor = g.nbNodes;
com.exportCommunity(commStruc, comMap, cursor, false);
g = com.partition2Graph();
com = new Louvain(g, precision);
double mod = com.modularity();
boolean improvement = true;
double newMod;
int level = 0;
do{
// System.out.println("Level:" + level + "\tNodes:" + com.g.nbNodes +
// "\tEdges:" + com.g.nbLinks + "\t links.size():" + com.g.links.size() + "\tTotalWeight:" + com.g.totalWeight);
level++;
improvement = com.oneLevel();
newMod = com.modularity();
cursor += g.nbNodes;
if(improvement)
com.exportCommunity(commStruc, comMap, cursor, false);
else
com.exportCommunity(commStruc, comMap, cursor, true);
g = com.partition2Graph();
com = new Louvain(g, precision);
//System.out.println("mod increased from " + mod + " to " + newMod);
mod = newMod;
}while(improvement);
long t2 = System.currentTimeMillis();
double time = (double)(t2 - t1)/1000;
com.runTime = time;
//System.out.println("Time:" + time + " seconds");
//System.out.println("Succeed");
return com;
}
public double modularity(){
double q= 0;
double m2 = (double)g.totalWeight;
for(int i = 0; i < size; i++){
if(tot.get(i) > 0)
q += in.get(i).doubleValue()/m2 - Math.pow(tot.get(i).doubleValue()/m2, 2);
}
return q;
}
public double modularityGain(int node, int comm, double dnodecomm, double wDegree){
double totc = tot.get(comm).doubleValue(); //sum of the weighted degrees of the nodes in comm
double degc = wDegree; //weighted degree of node
double m2 = g.totalWeight; //2*total weight of the network
double dnc = dnodecomm; //the connections from node to comm
return (dnc - totc*degc/m2);
}
public void remove(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2*dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2*dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
// generate the neighborhood communities of node
// this operation updates the lists neighWeight and neighPos
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
ArrayList<Pair> neighList = g.neighbors(node);
int deg = g.nbNeighbors(node);
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
Pair p = neighList.get(i);
int neigh = p.key;
int neighComm = n2c.get(neigh);
double neighW = p.value;
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
}
}
}
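/**
 * Aggregation step of the Louvain method: communities are renumbered consecutively and each
 * one becomes a single node of a new CompressGraph; the weights of all links between two
 * communities are summed into one edge, and intra-community weight becomes a self-loop.
 */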
//Aggregate the current partition into a community-level network
public CompressGraph partition2Graph(){
ArrayList<Integer> renumber = new ArrayList();
renumber.ensureCapacity(size);
for(int i = 0; i < size; i++)
renumber.add(new Integer(-1));
for(int node = 0; node < size; node++)
renumber.set(n2c.get(node), renumber.get(n2c.get(node)) + 1);
int newIndex = 0;
for(int i = 0; i < size; i++)
if(renumber.get(i) != -1)
renumber.set(i, newIndex++);
ArrayList<ArrayList<Integer>> commNodes = new ArrayList();
for(int i = 0; i < newIndex; i++)
commNodes.add(new ArrayList());
for(int node = 0; node < size; node++){
commNodes.get(renumber.get(n2c.get(node))).add(node);
}
CompressGraph g2 = new CompressGraph();
g2.nbNodes = commNodes.size();
g2.degrees.ensureCapacity(commNodes.size());
for(int i = 0; i < commNodes.size(); i++)
g2.degrees.add(new Integer(-1));
int commDeg = commNodes.size();
for(int comm = 0; comm < commDeg; comm++){
HashMap<Integer, Double> m = new HashMap();
int commSize = commNodes.get(comm).size();
for(int node = 0; node < commSize; node++){
ArrayList<Pair> neighList = g.neighbors(commNodes.get(comm).get(node));
int deg = g.nbNeighbors(commNodes.get(comm).get(node));
for(int i = 0; i < deg; i++){
Pair p = neighList.get(i);
int neigh = p.key;
int neighComm = renumber.get(n2c.get(neigh));
double neighWeight = p.value;
if(!m.containsKey(new Integer(neighComm))){
m.put(neighComm, neighWeight);
}else{
m.put(neighComm, m.get(neighComm) + neighWeight);
}
}
}
g2.degrees.set(comm, (comm==0)?m.size():g2.degrees.get(comm-1)+m.size());
g2.nbLinks += m.size();
Iterator ite = m.entrySet().iterator();
while(ite.hasNext()){
Map.Entry<Integer, Double> entry = (Map.Entry)ite.next();
g2.totalWeight += entry.getValue();
g2.links.add(entry.getKey());
g2.weights.add(entry.getValue());
}
}
return g2;
}
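/**
 * One level of local moving: nodes are visited in random order and each node is moved to the
 * neighbor community with the largest modularity gain; passes are repeated until no node
 * moves or the improvement of modularity in a pass drops below minModularity.
 */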
// carry out iteration on one level
public boolean oneLevel(){
boolean improvement = false;
int nbMoves;
int nbPassDone = 0;
double newMod = modularity();
double curMod = newMod;
ArrayList<Integer> randomOrder = new ArrayList();
randomOrder.ensureCapacity(size);
for(int i = 0; i < size; i++){
randomOrder.add(new Integer(i));
}
Random rand = new Random();
for(int i = 0; i < size-1; i++){
int randPos = Math.abs(rand.nextInt()) % (size-i) + i;
int tmp = randomOrder.get(i);
randomOrder.set(i, randomOrder.get(randPos).intValue());
randomOrder.set(randPos, tmp);
}
do{
curMod = newMod;
nbMoves = 0;
nbPassDone++;
//move each node from its current community to its neighbor communities to maximize the gain in Q
for(int nodeTmp = 0; nodeTmp < size; nodeTmp++){
int node = randomOrder.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for(int i = 0; i < neighLast; i++){
double increase = modularityGain(node, neighPos.get(i), neighWeight.get(neighPos.get(i)), wDegree);
if(increase > bestIncrease){
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(neighPos.get(i));
bestIncrease = increase;
}
}
insert(node, bestComm, bestNbLinks);
if(bestComm != nodeComm)
nbMoves++;
}
newMod = modularity();
if(nbMoves > 0 && newMod-curMod > minModularity)
improvement = true;
}while(nbMoves > 0 && newMod - curMod > minModularity);
return improvement;
}
/**
* save the hierarchical community structure
* @param structure - the hierarchical community structure
* @param comMap - the mapping of node ids between two iterations
* @param cursor - the maximum id of current node
* @param isTop - whether this is the top (root) level of the hierarchy
* @throws Exception
*/
public void exportCommunity(HashMap<Integer, CommNode> structure, HashMap<Integer, Integer> comMap,
int cursor, boolean isTop) throws Exception{
ArrayList<Integer> renumber = new ArrayList();
renumber.ensureCapacity(size);
for(int i = 0; i < size; i++)
renumber.add(new Integer(-1));
for(int node = 0; node < size; node++)
renumber.set(n2c.get(node), renumber.get(n2c.get(node)) + 1);
int newIndex = 0;
for(int i = 0; i < size; i++)
if(renumber.get(i) != -1)
renumber.set(i, newIndex++);
if(comMap.isEmpty()){
for(int node = 0; node < size; node++){
int parentId = cursor + renumber.get(n2c.get(node));
CommNode comm = new CommNode(node, NodeType.NODE, parentId);
structure.put(node, comm);
comMap.put(parentId-cursor, parentId);
}
}else if(!isTop){
HashMap<Integer, Integer> tempCommMap = new HashMap();
for(int node = 0; node < size; node++){
int nodeId = comMap.get(node);
//System.out.println(nodeId);
int parentId = cursor + renumber.get(n2c.get(node));
CommNode comm = new CommNode(nodeId, NodeType.COMM, parentId);
structure.put(nodeId, comm);
comMap.remove(node);
tempCommMap.put(parentId-cursor, parentId);
}
comMap.clear();
comMap.putAll(tempCommMap);
}else{
for(int node = 0; node < size; node++){
int nodeId = comMap.get(node);
CommNode comm = new CommNode(nodeId, NodeType.COMM, -1);
structure.put(nodeId, comm);
}
comMap.clear();
}
}
}
/**
* CompressGraph - The compressed graph used in the Louvain algorithm
* @author shangjiaxing
*
*/
class CompressGraph{
int nbNodes; //number of nodes
int nbLinks; //number of edges;
double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
ArrayList<Integer> degrees; //the cumulative degree of each node
ArrayList<Integer> links; //the flattened neighbor IDs of all nodes; together with degrees we can locate the neighbors of any node, e.g. the neighbors of node i (i>0) start at links[degrees[i-1]], and those of node 0 start at links[0]
ArrayList<Double> weights; //the weight of each link
ArrayList<ArrayList<Pair>> topology;
public CompressGraph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
degrees = new ArrayList();
links = new ArrayList();
weights = new ArrayList();
topology = new ArrayList();
}
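/**
 * Build the compressed (CSR-like) representation from the plain adjacency sets of a
 * CommGraph: degrees holds the cumulative degree of each node, and links/weights store the
 * flattened neighbor lists; every edge of the CommGraph is taken with unit weight.
 */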
public CompressGraph(CommGraph g0){
nbNodes = g0.nodes;
nbLinks = 0;
totalWeight = 0;
degrees = new ArrayList();
links = new ArrayList();
weights = new ArrayList();
topology = new ArrayList(nbNodes);
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
for(int i = 0; i < g0.nodes; i++){
int src = i;
TreeSet<Integer> nodeSet = g0.matrix.get(i);
Iterator<Integer> it = nodeSet.iterator();
while(it.hasNext()){
int dest = it.next();
double weight = 1.0;
topology.get(src).add(new Pair(dest, weight));
}
}
links.ensureCapacity(nbLinks);
weights.ensureCapacity(nbLinks);
for(int i = 0; i < nbNodes; i++){
if(i == 0)
degrees.add(topology.get(i).size());
else
degrees.add(degrees.get(i-1).intValue() + topology.get(i).size());
for(int j = 0; j < topology.get(i).size(); j++){
Pair pair = topology.get(i).get(j);
links.add(pair.key);
weights.add(pair.value);
totalWeight += pair.value;
}
}
nbLinks = links.size(); //nbLinks is not incremented in the loop above, so derive it from the link list
topology.clear();
topology = null;
}
public double weightedDegree(int node){
double wDegree = 0;
ArrayList<Pair> neighList = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++)
wDegree += neighList.get(i).value;
return wDegree;
}
public int nbNeighbors(int node){
if(node == 0){
return degrees.get(0);
}else{
return degrees.get(node) - degrees.get(node-1);
}
}
public double nbSelfLoops(int node){
ArrayList<Pair> neighList = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++){
Pair p = neighList.get(i);
if(p.key == node)
return p.value;
}
return 0;
}
public ArrayList<Pair> neighbors(int node){
ArrayList<Pair> neighList = new ArrayList();
if(node == 0){
for(int i = 0; i < degrees.get(0).intValue(); i++){
neighList.add(new Pair(links.get(i), weights.get(i)));
}
}
else{
for(int i = degrees.get(node-1); i < degrees.get(node); i++){
neighList.add(new Pair(links.get(i), weights.get(i)));
}
}
return neighList;
}
}
/**
* CommGraph - The uncompressed graph with community structure
* @author shangjiaxing
*
*/
class CommGraph{
int nodes;
double m2;
public HashMap<String, Integer> nodeDict;
ArrayList<Integer> n2c;
TreeMap<Integer, Integer> commSizeMap;
ArrayList<TreeSet<Integer>> matrix;
//initialize the graph with community structure
public CommGraph(String graphPath, String commPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nodes = nodeDict.size();
matrix = new ArrayList(nodes);
for(int i = 0; i < nodes; i++){
matrix.add(new TreeSet());
}
m2 = 0;
BufferedReader br = new BufferedReader(new FileReader(graphPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
matrix.get(src).add(dest);
m2 += weight;
if(src != dest){
matrix.get(dest).add(src);
m2 += weight;
}
str = br.readLine();
}
br.close();
readCommunity(commPath);
}
//read the initial community structure
public void readCommunity(String commPath) throws Exception{
n2c = new ArrayList(nodes);
commSizeMap = new TreeMap();
for(int i = 0; i < nodes; i++)
n2c.add(0);
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int commSize = 0;
while(token.hasMoreTokens()){
int nodeId = nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
commSize++;
}
commSizeMap.put(commId, commSize);
commId++;
str = br.readLine();
}
br.close();
}
/**
* add incremental data to the network with community structure
* @param incDataPath
* @throws Exception
*/
public void increaseData(String incDataPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(incDataPath));
int nodeId = nodeDict.size();
int commId = commSizeMap.size();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
if(!nodeDict.containsKey(from)){
nodeDict.put(from, nodeId);
matrix.add(new TreeSet());
n2c.add(commId);
commSizeMap.put(commId, 1);
nodeId++;
commId++;
nodes++;
}
if(!nodeDict.containsKey(to)){
nodeDict.put(to, nodeId);
matrix.add(new TreeSet());
n2c.add(commId);
commSizeMap.put(commId, 1);
nodeId++;
commId++;
nodes++;
}
int src = nodeDict.get(from);
int dest = nodeDict.get(to);
if(matrix.get(src).contains(dest)){ // if the link already exists, skip
str = br.readLine();
continue;
}
//move src and dest out from their original communities
int srcComm = n2c.get(src);
int destComm = n2c.get(dest);
if(commSizeMap.get(srcComm) > 2){ //move src out from srcComm
commSizeMap.put(srcComm, commSizeMap.get(srcComm)-1);
commSizeMap.put(commId, 1);
n2c.set(src, commId); // update the community label of src
commId++;
}
if(commSizeMap.get(destComm) > 2){ //move dest out from destComm
commSizeMap.put(destComm, commSizeMap.get(destComm)-1);
commSizeMap.put(commId, 1); // put dest in a new community
n2c.set(dest, commId); //update the community label of dest
commId++;
}
//update the adjacent matrix and the total weight
matrix.get(src).add(dest);
matrix.get(dest).add(src);
m2 += 2;
str = br.readLine();
}
}
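/**
 * Flatten the hierarchical community structure: for every leaf node in commStruc, follow the
 * parent pointers to its root community, relabel the communities consecutively, refresh n2c and
 * commSizeMap, and write each community as one tab-separated line of original node labels to commPath.
 */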
public void updateAndWriteCommunity(HashMap<Integer, CommNode> commStruc, String commPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(nodeDict);
commSizeMap.clear();
HashMap<Integer, ArrayList<Integer>> c2n = new HashMap();
BufferedWriter bw = new BufferedWriter(new FileWriter(commPath));
Iterator<Integer> it = commStruc.keySet().iterator();
while(it.hasNext()){
int nodeId = it.next();
if(commStruc.get(nodeId).type == NodeType.NODE){
int pId = nodeId;
while(commStruc.get(pId).pId != -1){
pId = commStruc.get(pId).pId;
}
if(!c2n.containsKey(pId)){
c2n.put(pId, new ArrayList());
}
c2n.get(pId).add(nodeId);
}
}
int commId = 0;
it = c2n.keySet().iterator();
while(it.hasNext()){
int commIdOld = it.next();
ArrayList<Integer> nodeList = c2n.get(commIdOld);
for(int i = 0; i < nodeList.size(); i++){
int nodeId = nodeList.get(i);
n2c.set(nodeId, commId);
bw.write(revDict.get(nodeId) + "\t");
}
commSizeMap.put(commId, nodeList.size());
bw.write("\r\n");
commId++;
}
bw.close();
}
}
}
| 32,268 | 35.544734 | 193 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/NodeType.java | package org.dzhuang.dynamic.OtherAlgorithms;
public class NodeType {
//define the type of nodes in the hierarchical community structure
public static int NODE = 0; //a normal node, i.e. a leaf node
public static int COMM = 1; // a community node, which contains children
}
| 291 | 31.444444 | 76 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/QCA.java | package org.dzhuang.dynamic.OtherAlgorithms;
import java.util.*;
import java.io.*;
import org.dzhuang.dynamic.comm.NMI;
import org.dzhuang.dynamic.graph.*;
import org.dzhuang.dynamic.util.*;
public class QCA {
ArrayList<Double> neighWeight; //the weight from node u to its neighbor communities
ArrayList<Integer> neighPos; //the index of node u's neighbor communities
int neighLast;
public Graph g; //the graph
public int size; //the number of communities, during iterations, there may be empty communities
ArrayList<Integer> n2c; // the mapping from nodes to communities
ArrayList<Double> in, tot; //the inner and total degree of the communities
double precision;
public double runTime;
//The empty constructor
public QCA(){}
/**
* Initialize the heuristic incremental algorithm
* @param graphPath
* @param comPath
* @param comParam - the comParam is used to classify community nodes, while param is used to classify nodes
* @throws Exception
*/
public void init(String graphPath, String comPath, double precision) throws Exception{
this.precision = precision;
readGraph(graphPath);
readCommunity(comPath);
}
public void init(String graphPath, String comPath) throws Exception{
System.out.println("Initializing...");
readGraph(graphPath);
System.out.println("Graph read! Nodes: " + g.nbNodes + " Edges: " + g.totalWeight/2);
readCommunity(comPath);
}
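/**
 * Process the incremental data files point by point (incPath extended with "_1", "_2", ...):
 * each file is replayed batch by batch through readNextBatch() and updateCommunityStructure(),
 * the resulting communities are written to a per-point variant of commOutPath, and the
 * modularity, running time and community count of every point are collected into the returned map.
 */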
public HashMap increase(String incPath, int maxPoints, String commOutPath) throws Exception{
long t0_1 = System.currentTimeMillis();
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Double> modList = new ArrayList();
ArrayList<Long> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
long t0_2 = System.currentTimeMillis();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
double mod = modularity();
int communities = nonEmptyCommunities();
this.writeCommunity(FileUtil.extendFileName(commOutPath, "_" + (point+1)));
modList.add(mod);
comList.add(communities);
long t2= System.currentTimeMillis();
long time = t2-t1+t0_2-t0_1;
System.out.println("Q" + point + ": " + mod + " Time: " + time + " Communities: " + communities);
timeList.add(time);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
public HashMap increaseNoComOutput(String incPath, int maxPoints, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public HashMap increasePeriod(String incPath, int periodMonth, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
ArrayList<Data> dataList = new ArrayList();
for(int point = 0; point < 10000; point++){
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists()){
if(dataList.size() > 0){
long t1 = System.currentTimeMillis();
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2 = System.currentTimeMillis();
dataList = new ArrayList();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
}
break;
}
dataList.addAll(FileUtil.readData(incFile.getAbsolutePath()));
if((point+1) % periodMonth == 0){
TreeMap<Link, Double> deltaG = new TreeMap();
readBatch(deltaG, dataList, 0, periodMonth);
long t1 = System.currentTimeMillis();
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG1 = new TreeMap();
start = readNextBatch(deltaG1, dataList, start); //read the next batch of incremental data into linkSet
if(deltaG1.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG1);
}
long t2 = System.currentTimeMillis();
dataList = new ArrayList();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public HashMap increaseInitial(String incPath, int initPoint, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = initPoint; point < 10000; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
start = readNextBatch(deltaG, dataList, start); //read the next batch of incremental data into linkSet
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public int readBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start, int periodMonth) throws Exception{
int end = start;
long startTime = dataList.get(start).timestamp;
long endTime = DateUtil.nextKMonth(startTime, periodMonth);
//parse the data
for(end = start; end < dataList.size(); end++){
Data data = dataList.get(end);
if(data.timestamp >= endTime)
break;
if(!g.nodeDict.containsKey(data.from))
g.nodeDict.put(data.from, g.nodeDict.size());
if(!g.nodeDict.containsKey(data.to))
g.nodeDict.put(data.to, g.nodeDict.size());
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
continue;
}
deltaG.put(link, 1.0);
}
if(end == dataList.size() && dataList.get(end-1).timestamp < endTime) //if the final batch of data is incomplete
end = -1;
return end;
}
public void readGraph(String graphPath) throws Exception{
this.g = new Graph(graphPath);
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for(int i = 0; i < size; i++){
neighWeight.add(new Double(-1.0));
neighPos.add(new Integer(-1));
}
neighLast = 0;
n2c.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
//initialize
for(int i = 0; i < size; i++){
n2c.add(i);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
}
public void readCommunity(String commPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = g.nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
}
commId++;
str = br.readLine();
}
br.close();
// update the tot and in of the community structure
for(int i = 0; i < size; i++){
tot.set(i, 0.0);
in.set(i, 0.0);
}
for(int i = 0; i < g.nbNodes; i++){
int srcCom = n2c.get(i);
ArrayList<Pair<Integer, Double>> list = g.neighbors(i);
for(int j = 0; j < list.size(); j++){
Pair<Integer, Double> p = list.get(j);
int dest = p.first;
int destCom = n2c.get(dest);
double w = p.second;
if(srcCom == destCom){ //if i and dest are in the same community
in.set(srcCom, in.get(srcCom) + w); //update in value of this community
}
tot.set(srcCom, tot.get(srcCom) + w); //update the tot value of community C(i)
}
}
}
//read the next batch of data, put them into a change graph represented by deltaG
public int readNextBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start) throws Exception{
int end = start;
Data data = dataList.get(end);
//TODO
/*if(data.from==data.to)
System.out.println("GG"+"\t"+data.from+"\t"+data.to);*/
if(!g.nodeDict.containsKey(data.from) && !g.nodeDict.containsKey(data.to)){ // new Edge (u,v), where u and v are both new nodes
g.nodeDict.put(data.from, g.nodeDict.size());
g.nodeDict.put(data.to, g.nodeDict.size());
//TODO
/*if(g.nodeDict.size()==80257) {
System.out.println("GG2"+"\t"+data.from+"\t"+data.to);
}*/
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
deltaG.put(link, 1.0);
end++;
return end;
}
else if(g.nodeDict.containsKey(data.from) && g.nodeDict.containsKey(data.to)){ //new Edge (u,v), where both u and v are old nodes
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(!g.linkMap.containsKey(link))
deltaG.put(link, 1.0);
end++;
return end;
}
else{ // new node u with adjacent edges to old nodes
String newId = data.from;
if(!g.nodeDict.containsKey(data.from)){
g.nodeDict.put(data.from, g.nodeDict.size());
}
else{
newId = data.to;
g.nodeDict.put(data.to, g.nodeDict.size());
}
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
deltaG.put(link, 1.0);
end++; //after the first edge is read, we go on reading the other adjacent edges
boolean isRead = false;
while(!isRead && end < dataList.size()){
Data next = dataList.get(end);
String from = next.from;
String to = next.to;
if(newId.equals(from) && g.nodeDict.containsKey(to)){
from = to;
to = newId;
Link l = new Link(g.nodeDict.get(from), g.nodeDict.get(to));
deltaG.put(l, 1.0);
end++;
}
else if(newId.equals(to) && g.nodeDict.containsKey(from)){
Link l = new Link(g.nodeDict.get(from), g.nodeDict.get(to));
deltaG.put(l, 1.0);
end++;
}
else{
isRead = true;
}
}
return end;
}
}
/**
* update the community structure according to the change of the graph
* @param deltaG - the change of graph
* @throws Exception
*/
public void updateCommunityStructure(TreeMap<Link, Double> deltaG) throws Exception{
//there are two cases
if(deltaG.size() == 1){ //case 1: new Edge
Link link = deltaG.keySet().iterator().next();
if((link.src >= g.nbNodes && link.dest >= g.nbNodes) || (link.src < g.nbNodes && link.dest < g.nbNodes)) // both endpoints new, or both endpoints old
newEdgeUpdate(deltaG);
else
newNodeUpdate(deltaG);
}
else{ //case 2: new node
newNodeUpdate(deltaG);
}
}
/**
* New edge (u, v), there are two cases:
* 1. u and v are both new nodes
* 2. u and v are both old nodes
* @param deltaG
* @throws Exception
*/
public void newEdgeUpdate(TreeMap<Link, Double> deltaG) throws Exception{
Iterator<Link> it = deltaG.keySet().iterator();
Link link = it.next();
double w = deltaG.get(link);
//Firstly extend the capacity of the Graph and Community
HashMap<Integer, ArrayList<Pair<Integer, Double>>> topology = new HashMap();
int oldNbNodes = g.nbNodes; // oldNbNodes is used to identify the newly added nodes
while(size < g.nodeDict.size()){
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(n2c.size());
in.add(0.0);
tot.add(0.0);
size++;
}
//Secondly, read the change part of the graph from deltaG and update graph
g.addLink(link, w);
//Thirdly, update the community structure
// (a) If both (u,v) are new nodes, put them in a new community
if(link.src >= oldNbNodes && link.dest >= oldNbNodes){
n2c.set(link.dest, n2c.get(link.src));
in.set(n2c.get(link.src), 2*w);
tot.set(n2c.get(link.src), 2*w);
}
else{
//else both u and v are old nodes
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
// (b) If u and v are in the same community, keep the community structure unchanged
if(srcCom == destCom){
in.set(srcCom, in.get(srcCom) + 2*w);
tot.set(srcCom, tot.get(srcCom) + 2*w);
}
// (c) The last case: u and v are in different communities, compare deltaQ(u,Cu,Cv) and deltaQ(v,Cu,Cv)
else{
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + w);
double m2 = g.totalWeight;
neighComm(link.src);
double dOut1 = neighWeight.get(destCom);
double dIn1 = neighWeight.get(srcCom);
double d1 = g.weightedDegree(link.src);
double deltaQ1 = 2*m2*(dOut1 - dIn1) + dIn1 * (2*tot.get(destCom) - 2*d1 - dIn1) - 2*d1*(d1+tot.get(destCom)-tot.get(srcCom));
neighComm(link.dest);
double dOut2 = neighWeight.get(srcCom);
double dIn2 = neighWeight.get(destCom);
double d2 = g.weightedDegree(link.dest);
double deltaQ2 = 2*m2*(dOut2 - dIn2) + dIn2 * (2*tot.get(srcCom) - 2*d2 - dIn2) - 2*d2*(d2+tot.get(srcCom)-tot.get(destCom));
int movedNode = -1;
if(deltaQ1 > 0 && deltaQ1 > deltaQ2){
n2c.set(link.src, n2c.get(link.dest)); //move u to C(v)
in.set(srcCom, in.get(srcCom) - 2*dIn1);
tot.set(srcCom, tot.get(srcCom) - d1);
in.set(destCom, in.get(destCom) + 2*dOut1);
tot.set(destCom, tot.get(destCom) + d1);
}
else if(deltaQ2 > 0 && deltaQ2 > deltaQ1){
neighComm(link.dest);
remove(link.dest, destCom, neighWeight.get(destCom));
insert(link.dest, srcCom, neighWeight.get(srcCom));
movedNode = link.dest;
}
if(movedNode != -1){
HashSet<Integer> updateSet = new HashSet();
ArrayList<Pair<Integer, Double>> list = g.neighbors(movedNode);
for(int i = 0; i < list.size(); i++){
updateSet.add(list.get(i).first);
}
PriorityQueue<Integer> queue = new PriorityQueue();
queue.addAll(updateSet);
while(!queue.isEmpty()){
ArrayList<Integer> moveList = new ArrayList();
moveList.addAll(queue);
queue.clear();
HashSet<Integer> nextSet = move(moveList);
queue.addAll(nextSet);
}
}
}
}
}
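/**
 * New node with one or more edges attached to existing nodes: extend the community data
 * structures so every new node starts in its own singleton community, add the new links to
 * the graph, update the tot values of the affected communities, and then repeatedly move
 * affected nodes (starting from one endpoint of the first new link) until no move is triggered.
 */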
public void newNodeUpdate(TreeMap<Link, Double> deltaG) throws Exception{
//Firstly extend the capacity of the Graph and Community
int oldNbNodes = g.nbNodes; // oldNbNodes is used to identify the newly added nodes
while(size < g.nodeDict.size()){
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(n2c.size());
in.add(0.0);
tot.add(0.0);
size++;
}
//Secondly, read the change part of the graph from deltaG and update graph
Link links[] = (Link[])deltaG.keySet().toArray(new Link[deltaG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
g.addLink(link, w);
}
//Thirdly, update the community structure
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
//since we know srcCom != destCom, so we do not need to update in
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + w);
}
HashSet<Integer> nodeSet = new HashSet(); // note: nodeSet and the loop below are left unused by this implementation
for(int i = 0; i < links.length; i++){
Link link = links[i];
}
PriorityQueue<Integer> queue = new PriorityQueue();
queue.add(links[0].dest);
while(!queue.isEmpty()){
ArrayList<Integer> moveList = new ArrayList();
moveList.addAll(queue);
queue.clear();
HashSet<Integer> nextSet = move(moveList);
queue.addAll(nextSet);
}
return;
}
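/**
 * For every node in nodeList, compare the force of staying in its current community with the
 * force of moving to each neighboring community, move the node to the best community if that
 * improves on staying, and return the neighbors of all moved nodes so they can be re-examined.
 */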
public HashSet<Integer> move(ArrayList<Integer> nodeList){
HashSet<Integer> updateSet = new HashSet();
//move node from its current community to the one which gives the maximum gain in modularity
for(int nodeTmp = 0; nodeTmp < nodeList.size(); nodeTmp++){
int node = nodeList.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
double Fin = neighWeight.get(nodeComm) - wDegree * (tot.get(nodeComm) - wDegree) / g.totalWeight;
int bestComm = nodeComm;
double bestF = Fin;
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom == nodeComm)
continue;
double Fout = neighWeight.get(neighCom) - wDegree * tot.get(neighCom) / (g.totalWeight);
if(Fout > bestF){
bestF = Fout;
bestComm = neighCom;
}
}
if(bestComm != nodeComm){
remove(node, nodeComm, neighWeight.get(nodeComm));
insert(node, bestComm, neighWeight.get(bestComm));
ArrayList<Pair<Integer, Double>> list = g.neighbors(node);
for(int i = 0; i < list.size(); i++){
Pair<Integer, Double> p = list.get(i);
updateSet.add(p.first);
}
}
// neighComm(node);
// remove(node, nodeComm, neighWeight.get(nodeComm));
//
// int bestComm = nodeComm;
// double bestNbLinks = 0;
// double bestIncrease = 0;
// for(int i = 0; i < neighLast; i++){
// double increase = modularityGain(node, neighPos.get(i), neighWeight.get(neighPos.get(i)), wDegree);
// if(increase > bestIncrease){
// bestComm = neighPos.get(i);
// bestNbLinks = neighWeight.get(bestComm);
// bestIncrease = increase;
// }
// }
// insert(node, bestComm, bestNbLinks);
// if(bestComm != nodeComm){
// ArrayList<Pair<Integer, Double>> list = g.neighbors(node);
// for(int i = 0; i < list.size(); i++){
// Pair<Integer, Double> p = list.get(i);
// updateSet.add(p.first);
// }
// }
}
//System.out.println("Nodes moved: " + nbMoves);
return updateSet;
}
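/**
 * Modularity of the current partition: with m2 = g.totalWeight (every link counted twice,
 * i.e. 2m), this computes Q = sum over communities c of [ in_c / m2 - (tot_c / m2)^2 ].
 */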
public double modularity(){
double q = 0;
double m2 = (double)g.totalWeight;
for(int i = 0; i < size; i++){
if(tot.get(i) > 0){
q += in.get(i)/m2 - Math.pow(tot.get(i).doubleValue()/m2, 2);
}
}
return q;
}
public int nonEmptyCommunities(){
TreeSet<Integer> comSet = new TreeSet();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
comSet.add(com);
}
return comSet.size();
}
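/**
 * Returns dnodecomm - tot_comm * wDegree / m2, where m2 is the total weight with every link
 * counted twice; this is the modularity gain of inserting the node into comm with the constant
 * factors dropped, so it is only meaningful for comparing candidate communities.
 */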
public double modularityGain(int node, int comm, double dnodecomm, double wDegree){
double totc = tot.get(comm).doubleValue();
double degc = wDegree;
double m2 = g.totalWeight;
double dnc = dnodecomm;
return (dnc - totc*degc/m2);
}
public void remove(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2*dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2*dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
//create a new singleton community for the node
public int insertSingleton(int node){
double k = g.weightedDegree(node);
int commId = 0; //find a usable community id
while(tot.get(commId) > 0)
commId++;
tot.set(commId, k);
in.set(commId, 0.0);
n2c.set(node, commId);
return commId;
}
// generate the neighborhood communities of node
// this operation will change list neighWeight, neighPos
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
ArrayList<Pair<Integer, Double>> list = g.neighbors(node);
int deg = g.nbNeighbors(node);
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
Pair<Integer, Double> p = list.get(i);
int neigh = p.first;
int neighComm = n2c.get(neigh);
double neighW = p.second;
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
}
}
}
public HashMap<Integer, ArrayList<Integer>> getCommunityToNode(){
HashMap<Integer, ArrayList<Integer>> c2n = new HashMap();
for(int i = 0; i < g.nbNodes; i++){
int com = n2c.get(i);
if(!c2n.containsKey(com))
c2n.put(com, new ArrayList());
c2n.get(com).add(i);
}
return c2n;
}
public void outputCommunityStatistics(){
int comNum = 0, maxSize=0, minSize=1000000;
float avgSize = 0;
HashMap<Integer, Integer> sizeMap = new HashMap();
ArrayList<Integer> sizeList = new ArrayList();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> inList = new ArrayList();
ArrayList<Float> totList = new ArrayList();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!sizeMap.containsKey(com))
sizeMap.put(com, 0);
sizeMap.put(com, sizeMap.get(com) + 1);
}
Iterator<Integer> it = sizeMap.keySet().iterator();
double m2 = g.totalWeight;
while(it.hasNext()){
int com = it.next();
int size = sizeMap.get(com);
double mod = in.get(com)/m2 - Math.pow(tot.get(com).doubleValue()/m2, 2);
if(size > maxSize)
maxSize = size;
if(size < minSize)
minSize = size;
sizeList.add(size);
modList.add((float)(mod * m2));
inList.add((float)in.get(com).doubleValue());
totList.add((float)tot.get(com).doubleValue());
}
//sort the results by community size
int tmp1;
float tmp2;
for(int i = 0; i < sizeList.size()-1; i++){
for(int j = i+1; j < sizeList.size(); j++){
if(sizeList.get(i) > sizeList.get(j) || (sizeList.get(i) == sizeList.get(j) && totList.get(i) > totList.get(j))){
Utility.listSwap(sizeList, i, j);
Utility.listSwap(modList, i, j);
Utility.listSwap(inList, i, j);
Utility.listSwap(totList, i, j);
}
}
}
int com8 = 0, com5 = 0; //the number of communities which contain 80% and 50% of the nodes
int totalSize = 0;
for(int i = sizeList.size()-1; i>=0; i--){
totalSize += sizeList.get(i);
if((double)totalSize / g.nbNodes < 0.8)
com8++;
if((double) totalSize / g.nbNodes < 0.5)
com5++;
}
comNum = sizeMap.size();
avgSize = (float)g.nbNodes / comNum;
System.out.println("Modularity: " + (float)modularity() + " M2: " + g.totalWeight);
System.out.println("#Communities: " + comNum + " Average Size: " + avgSize + " Max Size: " + maxSize + " Min Size: " + minSize);
System.out.println("#Communities for 50% nodes: " + com5 + " #Communities for 80% nodes: " + com8);
// System.out.println("size=" + sizeList + ";");
// System.out.println("Qc=" + modList + ";");
// System.out.println("in=" + inList + ";");
// System.out.println("tot=" + totList + ";");
}
public void validate(ArrayList<ArrayList<Pair<Integer, Double>>> topology){
double weight = 0, sumIn1 = 0, sumIn2 = 0, sumTot1 = 0, sumTot2 = 0;
int nodes = 0, edges = 0;
ArrayList<Double> inList = new ArrayList();
ArrayList<Double> totList = new ArrayList();
for(int i = 0; i < g.nbNodes; i++){
inList.add(0.0);
totList.add(0.0);
}
for(int i = 0; i < topology.size(); i++){
nodes++;
int srcCom = n2c.get(i);
ArrayList<Pair<Integer, Double>> neighList = topology.get(i);
for(int j = 0; j < neighList.size(); j++){
Pair<Integer, Double> p = neighList.get(j);
int dest = p.first;
double w = p.second;
int destCom = n2c.get(dest);
if(srcCom == destCom)
inList.set(srcCom, inList.get(srcCom) + w);
totList.set(srcCom, totList.get(srcCom) + w);
edges++;
weight += w;
}
}
boolean isValid = true;
double q = 0;
for(int i = 0; i < inList.size(); i++){
sumIn1 += in.get(i);
sumIn2 += inList.get(i);
sumTot1 += tot.get(i);
sumTot2 += totList.get(i);
if(in.get(i) != inList.get(i) || tot.get(i) != totList.get(i)){
//System.out.println(i + "\t" + in.get(i) + "\t" + inList.get(i) + "\t" + tot.get(i) + "\t" + totList.get(i));
}
if(totList.get(i) > 0){
q += inList.get(i)/weight - Math.pow(totList.get(i).doubleValue()/weight, 2);
}
}
System.out.println("Mod: " + modularity() + " True mod: " + q);
System.out.println("In1: " + sumIn1 + " In2: "+ sumIn2 + " tot1: " + sumTot1 + " tot2: " + sumTot2);
}
public void writeCommunity(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
HashMap<Integer, ArrayList<Integer>> comToNode = new HashMap();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!comToNode.containsKey(com))
comToNode.put(com, new ArrayList());
comToNode.get(com).add(i);
}
//write community
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<Integer> it = comToNode.keySet().iterator();
while(it.hasNext()){
int com = it.next();
ArrayList<Integer> nodeList = comToNode.get(com);
bw.write(revDict.get(nodeList.get(0)));
for(int i = 1; i < nodeList.size(); i++){
bw.write("\t" + revDict.get(nodeList.get(i)));
}
bw.write("\r\n");
}
bw.close();
}
public void writeGraph(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
TreeSet<LabelEdge> edgeSet = new TreeSet();
Iterator<Link> it = g.linkMap.keySet().iterator();
while(it.hasNext()){
Link link = it.next();
String from = revDict.get(link.src);
String to = revDict.get(link.dest);
LabelEdge edge = new LabelEdge(from, to);
edgeSet.add(edge);
}
//write graph
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<LabelEdge> it1 = edgeSet.iterator();
while(it1.hasNext()){
LabelEdge edge = it1.next();
bw.write(edge.src + "\t" + edge.dest + "\t1\r\n");
}
bw.close();
}
/**
* The inner class representing the graph (network topology)
* @author shangjiaxing
*
*/
public class Graph{
HashMap<String, Integer> nodeDict; //mapping the node label (String) to node id (Integer)
public int nbNodes; //number of nodes
public int nbLinks; //number of edges;
public double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
ArrayList<ArrayList<Pair<Integer, Double>>> topology; //The matrix of the graph, the neighbors of i is denoted as topology.get(i)
TreeMap<Link, Double> linkMap;
public Graph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
//topology = new ArrayList();
}
public Graph(String graphPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nbNodes = nodeDict.size();
topology = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
topology.ensureCapacity(nbNodes);
linkMap = new TreeMap();
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
nbLinks = 0;
totalWeight = 0;
String str = br.readLine().trim();
while(str != null && !str.equals("")){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
linkMap.put(new Link(src, dest), weight);
topology.get(src).add(new Pair(dest, weight));
nbLinks++;
totalWeight += weight; //to support weighted network
if(src != dest){
topology.get(dest).add(new Pair(src, weight));
nbLinks++;
totalWeight += weight;
}
str = br.readLine();
}
br.close();
}
public double weightedDegree(int node){
double wDegree = 0;
ArrayList<Pair<Integer, Double>> list = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++)
wDegree += list.get(i).second;
return wDegree;
}
public int nbNeighbors(int node){
return topology.get(node).size();
}
public double nbSelfLoops(int node){
ArrayList<Pair<Integer, Double>> list = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++){
Pair<Integer, Double> p = list.get(i);
if(p.first == node)
return p.second;
}
return 0;
}
public void addLink(Link link, double w){
linkMap.put(link, w);
if(link.src >= nbNodes){
topology.add(new ArrayList());
nbNodes++;
}
//TODO
/*if(link.src==link.dest)
System.out.println(topology.size()+"\t"+link.src+"\t"+link.dest);*/
topology.get(link.src).add(new Pair(link.dest, w));
nbLinks++;
totalWeight += w;
if(link.dest == link.src)
return;
if(link.dest >= nbNodes){
topology.add(new ArrayList());
nbNodes++;
}
topology.get(link.dest).add(new Pair(link.src, w));
nbLinks++;
totalWeight += w;
}
public ArrayList<Pair<Integer, Double>> neighbors(int node){
return topology.get(node);
}
public int getNbNodes(){
return nbNodes;
}
public int getNbLinks(){
return nbLinks;
}
public double getTotalWeight(){
return totalWeight;
}
}
/**
* A generic key-value pair: an inner helper class used by the algorithm
* @param <T1>
* @param <T2>
*/
class Pair<T1, T2>{
public T1 first;
public T2 second;
public Pair(T1 first, T2 second){
this.first = first;
this.second = second;
}
}
class Link implements Comparable{
int src;
int dest;
public Link(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
}
public int compareTo(Object o){
Link e = (Link)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
}
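/*
 * A minimal usage sketch (the file paths below are hypothetical placeholders, not files shipped
 * with this repository):
 *
 *   QCA qca = new QCA();
 *   qca.init("example_graph_0.txt", "example_comm_0.txt");        // initial snapshot and its communities
 *   qca.increase("example_inc.txt", 10, "example_comm_out.txt");  // process up to 10 incremental files
 *
 * init() loads the initial graph and community structure; increase() then reads per-point variants
 * of the incremental path (suffixed "_1", "_2", ... via FileUtil.extendFileName) and writes the
 * updated communities of each point to a per-point variant of the output path.
 */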
| 36,170 | 33.67977 | 141 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/SampleGenerator3.java | /**
* LouvainRefine Algorithm
*/
package org.dzhuang.dynamic.OtherAlgorithms;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.TreeSet;
import org.dzhuang.dynamic.graph.Data;
import org.dzhuang.dynamic.graph.LabelEdge;
import org.dzhuang.dynamic.util.FileUtil;
import org.dzhuang.dynamic.util.Utility;
import toolbox.lr.SampleType;
public class SampleGenerator3 {
ArrayList<Double> neighWeight; // the weight from node u to its neighbor communities
ArrayList<Integer> neighPos; // the index of node u's neighbor communities
int neighLast;
public Graph g; // the graph
public int size; // the number of communities, during iterations, there may be empty communities
ArrayList<Integer> n2c; // the mapping from nodes to communities
ArrayList<Double> in, tot; // the inner and total degree of the communities
double minModularity; // the threshold below which nodes will stop moving
public double runTime;
public int round = 0;
public ArrayList<Double> oldInList = new ArrayList();
public ArrayList<Double> oldKList = new ArrayList();
public SampleGenerator3() {
}
public SampleGenerator3(Graph g, double minModularity) throws Exception {
this.g = g;
this.minModularity = minModularity;
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for (int i = 0; i < size; i++) {
neighWeight.add(new Double(-1.0));
neighPos.add(new Integer(-1));
}
neighLast = 0;
n2c.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
// initialize
for (int i = 0; i < size; i++) {
n2c.add(i);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
}
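/**
 * Generate training samples: the first 'ratio' fraction of the initial data becomes the base
 * graph and the remainder becomes an incremental part; communities are detected on the base
 * graph with Louvain, the incremental edges are then replayed, and every examined node is
 * recorded as a sample (label 1 if it moved to another community, 0 if it stayed) together with
 * its degree-based features before the samples are written to samplePath.
 */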
public void generateSample(String initDataPath, double ratio, String samplePath) throws Exception {
double precision = 0.0001;
String tmpGraphPath = initDataPath + ".graph.tmp";
String tmpIncPath = initDataPath + ".inc.tmp";
String tmpComPath = initDataPath + ".com.tmp";
splitInitialData(initDataPath, ratio, tmpGraphPath, tmpIncPath);
Louvain louvain = new Louvain();
louvain.runAndExport(tmpGraphPath, 0.0001, tmpComPath);
System.out.println("Generating samples...");
HashSet<String> sampleSet = new HashSet();
Graph g = new Graph(tmpGraphPath);
SampleGenerator3 com = new SampleGenerator3(g, precision);
com.readCommunity(tmpComPath);
System.out.println("initial modularity: " + com.modularity());
TreeMap<Link, Double> deltaG = com.readInc(tmpIncPath); //read the change of network
com.updateCommunityStructure(deltaG, sampleSet);
System.out.println("Modularity after nodes moved: " + com.modularity());
FileUtil.deleteFile(tmpGraphPath);
FileUtil.deleteFile(tmpIncPath);
FileUtil.deleteFile(tmpComPath);
System.out.print("Node samples: " + sampleSet.size() + " ");
com.writeSample(sampleSet, samplePath);
}
public void generateTmpSample(String initDataPath, double ratio, String samplePath) throws Exception {
double precision = 0.0001;
String tmpGraphPath = initDataPath + ".graph.tmp";
String tmpIncPath = initDataPath + ".inc.tmp";
String tmpDecPath = initDataPath + ".dec.tmp";
String tmpComPath = initDataPath + ".com.tmp";
splitInitialData(initDataPath, ratio, tmpGraphPath, tmpIncPath, tmpDecPath);
Louvain louvain = new Louvain();
louvain.runAndExport(tmpGraphPath, 0.0001, tmpComPath);
System.out.println("Generating samples...");
HashSet<String> sampleSet = new HashSet();
Graph g = new Graph(tmpGraphPath);
SampleGenerator3 com = new SampleGenerator3(g, precision);
com.readCommunity(tmpComPath);
System.out.println("initial modularity: " + com.modularity());
TreeMap<Link, Double> incG = com.readInc(tmpIncPath); //read the change of network
TreeMap<Link, Double> decG = com.readDec(tmpDecPath);
com.updateCommunityStructure(incG, decG, sampleSet);
System.out.println("Modularity after nodes moved: " + com.modularity());
FileUtil.deleteFile(tmpGraphPath);
FileUtil.deleteFile(tmpIncPath);
FileUtil.deleteFile(tmpDecPath);
FileUtil.deleteFile(tmpComPath);
System.out.print("Node samples: " + sampleSet.size() + " ");
com.writeSample(sampleSet, samplePath);
}
public void generateSampleNew(String initDataPath, double ratio, String samplePath) throws Exception {
double precision = 0.0001;
String tmpGraphPath = initDataPath + ".graph.tmp";
String tmpIncPath = initDataPath + ".inc.tmp";
String tmpComPath = initDataPath + ".com.tmp";
splitInitialData(initDataPath, ratio, tmpGraphPath, tmpIncPath);
Louvain louvain = new Louvain();
louvain.runAndExport(tmpGraphPath, 0.0001, tmpComPath);
System.out.println("Generating samples...");
HashSet<String> sampleSet = new HashSet();
Graph g = new Graph(tmpGraphPath);
SampleGenerator3 com = new SampleGenerator3(g, precision);
com.readCommunity(tmpComPath);
System.out.println("initial modularity: " + com.modularity());
//initialize
while(oldInList.size() < g.nbNodes){
oldInList.add(0.0);
oldKList.add(0.0);
}
for(int i = 0; i < g.nbNodes; i++){
neighComm(i);
oldKList.set(i, g.weightedDegree(i)); //the weighted degree of i
oldInList.set(i, neighWeight.get(neighPos.get(n2c.get(i)))); //the connection from i to its own community
}
TreeMap<Link, Double> deltaG = com.readInc(tmpIncPath); //read the change of network
com.updateCommunityStructure(deltaG, sampleSet);
System.out.println("Modularity after nodes moved: " + com.modularity());
FileUtil.deleteFile(tmpGraphPath);
FileUtil.deleteFile(tmpIncPath);
FileUtil.deleteFile(tmpComPath);
System.out.print("Node samples: " + sampleSet.size() + " ");
com.writeSample(sampleSet, samplePath);
}
public static void splitInitialData(String initialPath, double ratio, String graphPath, String incPath) throws Exception{
TreeSet<LabelEdge> edgeSet = new TreeSet();
ArrayList<Data> dataList = FileUtil.readData(initialPath);
int divide = (int)(dataList.size() * ratio);
for(int i = 0; i < divide; i++){
Data data = dataList.get(i);
LabelEdge edge = new LabelEdge(data.from, data.to);
edgeSet.add(edge);
}
//write graph file
BufferedWriter bw = new BufferedWriter(new FileWriter(graphPath));
Iterator<LabelEdge> it = edgeSet.iterator();
while(it.hasNext()){
LabelEdge edge = it.next();
bw.write(edge.src + "\t" + edge.dest + "\t1\r\n");
}
bw.close();
//write inc file
bw = new BufferedWriter(new FileWriter(incPath));
for(int i = divide; i < dataList.size(); i++){
Data data = dataList.get(i);
bw.write(data.from + "\t" + data.to + "\t" + data.timestamp + "\r\n");
}
bw.close();
}
public static void splitInitialData(String initialPath, double ratio, String graphPath, String incPath, String decPath) throws Exception{
TreeSet<LabelEdge> edgeSet = new TreeSet();
ArrayList<Data> dataList = FileUtil.readData(initialPath);
int divide = (int)(dataList.size() * ratio);
for(int i = 0; i < divide; i++){
Data data = dataList.get(i);
LabelEdge edge = new LabelEdge(data.from, data.to);
edgeSet.add(edge);
}
//write graph file
BufferedWriter bw = new BufferedWriter(new FileWriter(graphPath));
Iterator<LabelEdge> it = edgeSet.iterator();
while(it.hasNext()){
LabelEdge edge = it.next();
bw.write(edge.src + "\t" + edge.dest + "\t1\r\n");
}
bw.close();
//write inc file
bw = new BufferedWriter(new FileWriter(incPath));
for(int i = divide; i < dataList.size(); i++){
Data data = dataList.get(i);
bw.write(data.from + "\t" + data.to + "\t" + data.timestamp + "\r\n");
}
bw.close();
//write dec file
int divide1 = (int)(dataList.size() * (1-ratio));
bw = new BufferedWriter(new FileWriter(decPath));
for(int i = 0; i < divide1; i++){
Data data = dataList.get(i);
bw.write(data.from + "\t" + data.to + "\t" + data.timestamp + "\r\n");
}
bw.close();
}
public void readCommunity(String commPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = g.nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
}
commId++;
str = br.readLine();
}
br.close();
// update the tot and in of the community structure
for(int i = 0; i < size; i++){
tot.set(i, 0.0);
in.set(i, 0.0);
}
for(int i = 0; i < g.nbNodes; i++){
int srcCom = n2c.get(i);
ArrayList<Pair<Integer, Double>> neighList = g.topology.get(i);
for(int j = 0; j < neighList.size(); j++){
Pair<Integer, Double> p = neighList.get(j);
int dest = p.first;
int destCom = n2c.get(dest);
double w = p.second;
if(srcCom == destCom){ //if i and dest are in the same community
in.set(srcCom, in.get(srcCom) + w); //update in value of this community
}
tot.set(srcCom, tot.get(srcCom) + w); //update the tot value of community C(i)
}
}
}
public double modularity() {
double q = 0;
double m2 = (double) g.totalWeight;
for (int i = 0; i < size; i++) {
if (tot.get(i) > 0) {
q += in.get(i).doubleValue() / m2
- Math.pow(tot.get(i).doubleValue() / m2, 2);
}
}
return q;
}
public double modularityGain(int node, int comm, double dnodecomm,
double wDegree) {
double totc = tot.get(comm).doubleValue(); // total weight of the links incident to community comm
double degc = wDegree; // weighted degree of the node
double m2 = g.totalWeight; // total weight of all links in the network, each link counted twice
double dnc = dnodecomm; // total weight of the links between the node and community comm
return (dnc - totc * degc / m2);
}
public void remove(int node, int comm, double dnodecomm) {
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2 * dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm) {
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2 * dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
// generate the neighborhood communities of node
// this operation will change list neighWeight, neighPos
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
ArrayList<Pair<Integer, Double>> neighList = g.topology.get(node);
int deg = g.nbNeighbors(node);
//System.out.println("node: " + node + " n2c: " + n2c.get(node));
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
int neigh = neighList.get(i).first;
int neighComm = n2c.get(neigh);
double neighW = neighList.get(i).second;
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
}
}
}
// aggregate the current partition into a new graph g2, where each community becomes a single node
public Graph partition2Graph() {
HashMap<Integer, HashMap<Integer, Double>> matrix = new HashMap();
for(int i = 0; i < g.topology.size(); i++){
ArrayList<Pair<Integer, Double>> neighList = g.topology.get(i);
int src = i;
int srcCom = n2c.get(src);
if(!matrix.containsKey(srcCom))
matrix.put(srcCom, new HashMap());
HashMap<Integer, Double> srcMap = matrix.get(srcCom);
for(int j = 0; j < neighList.size(); j++){
Pair<Integer, Double> p = neighList.get(j);
int dest = p.first;
double weight = p.second;
int destCom = n2c.get(dest);
if(!srcMap.containsKey(destCom))
srcMap.put(destCom, weight);
else
srcMap.put(destCom, srcMap.get(destCom) + weight);
}
}
HashMap<Integer, Integer> comIdMap = new HashMap();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!comIdMap.containsKey(com))
comIdMap.put(com, comIdMap.size());
}
Graph g2 = new Graph();
g2.nbNodes = comIdMap.size();
g2.topology.ensureCapacity(g2.nbNodes);
for(int i = 0; i < g2.nbNodes; i++){
g2.topology.add(new ArrayList());
}
Iterator<Map.Entry<Integer, HashMap<Integer, Double>>> it = matrix.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, HashMap<Integer, Double>> entry = it.next();
int srcCom = comIdMap.get(entry.getKey());
Iterator<Map.Entry<Integer, Double>> subIt = entry.getValue().entrySet().iterator();
while(subIt.hasNext()){
Map.Entry<Integer, Double> subEntry = subIt.next();
int destCom = comIdMap.get(subEntry.getKey());
double weight = subEntry.getValue();
Pair<Integer, Double> p = new Pair(destCom, weight);
g2.topology.get(srcCom).add(p);
g2.nbLinks++;
g2.totalWeight += p.second;
}
}
return g2;
}
public int nonEmptyCommunities(){
TreeSet<Integer> comSet = new TreeSet();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(com >= 0)
comSet.add(com);
}
return comSet.size();
}
// carry out iteration on one level
public HashSet<Integer> refine(ArrayList<Integer> nodeList, HashSet<String> sampleSet, boolean doSampling) {
System.out.println("Node to move: " + nodeList.size());
HashSet<Integer> updateSet = new HashSet();
int nbMoves = 0;
for (int nodeTmp = 0; nodeTmp < nodeList.size(); nodeTmp++) {
int node = nodeList.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for (int i = 0; i < neighLast; i++) {
double increase = modularityGain(node, neighPos.get(i),
neighWeight.get(neighPos.get(i)), wDegree);
if (increase > bestIncrease) {
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(neighPos.get(i));
bestIncrease = increase;
}
}
insert(node, bestComm, bestNbLinks);
String str = "" + (int)wDegree;
str += "\t" + neighWeight.get(nodeComm).intValue();
if (bestComm != nodeComm) {
nbMoves++;
if(doSampling)
sampleSet.add(node + "\t" + "1\t" + str);
ArrayList<Pair<Integer, Double>> neighbors = g.topology.get(node);
for(int i = 0; i < neighbors.size(); i++){
Pair<Integer, Double> p = neighbors.get(i);
int neigh = p.first;
updateSet.add(neigh);
}
}else{
if(doSampling)
sampleSet.add(node + "\t" + "0\t" + str);
}
}
System.out.println("Node moved: " + nbMoves);
return updateSet;
}
public boolean oneComLevel(HashSet<String> sampleSet, int base) {
boolean improvement = false;
int nbMoves;
double newMod = modularity();
double curMod = newMod;
ArrayList<Integer> randomOrder = Utility.randomOrderList(size);
int totalMoves = 0;
do {
curMod = newMod;
nbMoves = 0;
// For each node, move it out from its original community and put it into a new community
for (int nodeTmp = 0; nodeTmp < size; nodeTmp++) {
int node = randomOrder.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for (int i = 0; i < neighLast; i++) {
double increase = modularityGain(node, neighPos.get(i),
neighWeight.get(neighPos.get(i)), wDegree);
if (increase > bestIncrease) {
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(neighPos.get(i));
bestIncrease = increase;
}
}
insert(node, bestComm, bestNbLinks);
String str = "" + (int)wDegree + "\t" + (int)g.nbSelfLoops(node);
if (bestComm != nodeComm) {
sampleSet.add((base+node) + "\t" + "1\t" + str);
nbMoves++;
} else {
sampleSet.add((base+node) + "\t" + "0\t" + str);
}
}
newMod = modularity();
if (nbMoves > 0 && newMod - curMod > minModularity)
improvement = true;
} while (nbMoves > 0 && newMod - curMod > minModularity);
return improvement;
}
//read incremental data
public TreeMap<Link, Double> readInc(String incPath) throws Exception{
TreeMap<Link, Double> deltaG = new TreeMap();
BufferedReader br = new BufferedReader(new FileReader(incPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
if(!g.nodeDict.containsKey(from))
g.nodeDict.put(from, g.nodeDict.size());
if(!g.nodeDict.containsKey(to))
g.nodeDict.put(to, g.nodeDict.size());
int src = g.nodeDict.get(from);
int dest = g.nodeDict.get(to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
str = br.readLine();
continue;
}
deltaG.put(link, 1.0);
str = br.readLine();
}
br.close();
return deltaG;
}
//read decremental data
public TreeMap<Link, Double> readDec(String incPath) throws Exception{
TreeMap<Link, Double> deltaG = new TreeMap();
BufferedReader br = new BufferedReader(new FileReader(incPath));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
int src = g.nodeDict.get(from);
int dest = g.nodeDict.get(to);
Link link = new Link(src, dest);
if(g.linkMap.containsKey(link)){
deltaG.put(link, -1.0);
}
str = br.readLine();
}
br.close();
return deltaG;
}
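/**
 * Apply the incremental edges in deltaG to the graph and the community structure: extend the
 * data structures so each new node starts in its own community, add the links, update the
 * in/tot values of the affected communities, and then repeatedly refine the affected nodes
 * (collecting node samples along the way) until no further moves occur.
 */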
public void updateCommunityStructure(TreeMap<Link, Double> deltaG, HashSet<String> sampleSet) throws Exception{
int newComId = nonEmptyCommunities();
int oldNbNodes = g.nbNodes;
while(size < g.nodeDict.size()){
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(newComId++);
in.add(0.0);
tot.add(0.0);
g.topology.add(new ArrayList());
g.nbNodes++;
size++;
}
//read the change part of the graph from deltaG
Link links[] = (Link []) deltaG.keySet().toArray(new Link[deltaG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
g.linkMap.put(new Link(link.src, link.dest), w);
g.topology.get(link.src).add(new Pair(link.dest, w));
g.nbLinks++;
g.totalWeight += w;
if(link.src != link.dest){
g.topology.get(link.dest).add(new Pair(link.src, w));
g.nbLinks++;
g.totalWeight += w;
}
}
// initialize the community structure by putting every new node into a singleton community
TreeSet<Integer> nodeToUpdate = new TreeSet();
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
if(srcCom == destCom){
in.set(srcCom, in.get(srcCom) + 2*w);
}
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + w);
nodeToUpdate.add(link.src);
nodeToUpdate.add(link.dest);
}
System.out.println("Modularity after network changed: " + modularity());
ArrayList<Integer> nodeList = new ArrayList();
nodeList.addAll(nodeToUpdate);
boolean doSampling = true;
while(nodeList.size() > 0){
HashSet<Integer> nextSet = refine(nodeList, sampleSet, doSampling);
if(!doSampling)
doSampling = true;
nodeList.clear();
nodeList.addAll(nextSet);
}
}
public void updateCommunityStructure(TreeMap<Link, Double> incG, TreeMap<Link, Double> decG, HashSet<String> sampleSet) throws Exception{
int newComId = nonEmptyCommunities();
int oldNbNodes = g.nbNodes;
while(size < g.nodeDict.size()){
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(newComId++);
in.add(0.0);
tot.add(0.0);
g.topology.add(new ArrayList());
g.nbNodes++;
size++;
}
//read the incremental change part of the graph from deltaG
Link links[] = (Link []) incG.keySet().toArray(new Link[incG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = incG.get(link);
g.linkMap.put(new Link(link.src, link.dest), w);
g.topology.get(link.src).add(new Pair(link.dest, w));
g.nbLinks++;
g.totalWeight += w;
if(link.src != link.dest){
g.topology.get(link.dest).add(new Pair(link.src, w));
g.nbLinks++;
g.totalWeight += w;
}
}
// initialize the community structure by putting every new node into a singleton community
TreeSet<Integer> nodeToUpdate = new TreeSet();
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = incG.get(link);
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
if(srcCom == destCom){
in.set(srcCom, in.get(srcCom) + 2*w);
}
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + w);
nodeToUpdate.add(link.src);
nodeToUpdate.add(link.dest);
}
// handle the decremental change of network
Iterator<Link> it = decG.keySet().iterator();
links = (Link []) decG.keySet().toArray(new Link[decG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
removeLink(link);
}
for(int i = 0; i < links.length; i++){
Link link = links[i];
if(n2c.get(link.src) != -2)
nodeToUpdate.add(link.src);
else
nodeToUpdate.remove(link.src);
if(n2c.get(link.dest) != -2)
nodeToUpdate.add(link.dest);
else
nodeToUpdate.remove(link.dest);
}
//update the community structure
ArrayList<Integer> nodeList = new ArrayList();
nodeList.addAll(nodeToUpdate);
boolean doSampling = true;
while(nodeList.size() > 0){
HashSet<Integer> nextSet = refine(nodeList, sampleSet, doSampling);
if(!doSampling)
doSampling = true;
nodeList.clear();
nodeList.addAll(nextSet);
}
}
public void removeLink(Link link){
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
ArrayList<Pair<Integer, Double>> srcList = g.topology.get(link.src);
ArrayList<Pair<Integer, Double>> destList = g.topology.get(link.dest);
for(int i = 0; i < srcList.size(); i++){
Pair<Integer, Double> pair = srcList.get(i);
if(pair.first == link.dest){
srcList.remove(i);
g.nbLinks--;
g.totalWeight--;
g.linkMap.remove(link);
break;
}
}
for(int i = 0; i < destList.size(); i++){
Pair<Integer, Double> pair = destList.get(i);
if(pair.first == link.src){
destList.remove(i);
g.nbLinks--;
g.totalWeight--;
break;
}
}
if(srcCom == destCom)
in.set(srcCom, in.get(srcCom) - 2);
tot.set(srcCom, tot.get(srcCom) - 1);
tot.set(destCom, tot.get(destCom) - 1);
if(srcList.size() == 0)
n2c.set(link.src, -2); //mark the src node as removed
if(destList.size() == 0)
n2c.set(link.dest, -2); //mark the dest node as removed
}
/**
 * write the sample data to file
 *
 * @param sampleSet - the collected samples, each of the form "nodeId \t label \t features"
 * @param samplePath - the path of the output sample file
 */
public void writeSample(HashSet<String> sampleSet, String samplePath) throws Exception {
int positives = 0, negatives = 0;
BufferedWriter bw = new BufferedWriter(new FileWriter(samplePath));
Iterator<String> it = sampleSet.iterator();
while (it.hasNext()) {
String sample = it.next();
sample = sample.substring(sample.indexOf('\t')+1);
int type = new Integer(sample.substring(0, sample.indexOf('\t')));
if(type == SampleType.POSITIVE)
positives++;
else
negatives++;
bw.write(sample + "\r\n");
}
bw.close();
System.out.println("Positives: " + positives + " Negatives: " + negatives);
}
/**
* The inner class representing the graph (network topology)
*
* @author shangjiaxing
*
*/
public class Graph{
HashMap<String, Integer> nodeDict; //mapping the node label (String) to node id (Integer)
public int nbNodes; //number of nodes
public int nbLinks; //number of edges;
public double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
ArrayList<ArrayList<Pair<Integer, Double>>> topology; //The matrix of the graph, the neighbors of i is denoted as topology.get(i)
TreeMap<Link, Double> linkMap;
public Graph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
topology = new ArrayList();
}
public Graph(String graphPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nbNodes = nodeDict.size();
topology = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
topology.ensureCapacity(nbNodes);
this.linkMap = new TreeMap();
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
nbLinks = 0;
totalWeight = 0;
String str = br.readLine().trim();
while(str != null && !str.equals("")){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
linkMap.put(new Link(src, dest), weight);
topology.get(src).add(new Pair(dest, weight));
nbLinks++;
totalWeight += weight; //to support weighted network
if(src != dest){
topology.get(dest).add(new Pair(src, weight));
nbLinks++;
totalWeight += weight;
}
str = br.readLine();
}
br.close();
}
public double weightedDegree(int node){
double wDegree = 0;
ArrayList<Pair<Integer, Double>> neighList = topology.get(node);
for(int i = 0; i < neighList.size(); i++){
Pair<Integer, Double> p = neighList.get(i);
wDegree += p.second;
}
return wDegree;
}
public int nbNeighbors(int node){
return topology.get(node).size();
}
public double nbSelfLoops(int node){
ArrayList<Pair<Integer, Double>> neighList = topology.get(node);
for(int i = 0; i < neighList.size(); i++){
Pair<Integer, Double> p = neighList.get(i);
if(node == p.first.intValue())
return p.second;
}
return 0;
}
public int getNbNodes(){
return nbNodes;
}
public int getNbLinks(){
return nbLinks;
}
public ArrayList<ArrayList<Pair<Integer, Double>>> getTopology(){
return topology;
}
public double getTotalWeight(){
return totalWeight;
}
}
	/**
	 * Inner helper class used by the algorithm
	 *
	 * @param <T1>
	 * @param <T2>
	 */
class Pair<T1, T2> {
public T1 first;
public T2 second;
public Pair(T1 first, T2 second) {
this.first = first;
this.second = second;
}
}
class Score implements Comparable {
public int key;
public double value;
public Score(int key, double value) {
this.key = key;
this.value = value;
}
public int compareTo(Object obj) {
Score score = (Score) obj;
if (this.value < score.value)
return -1;
else
return 1;
}
}
class Link implements Comparable{
int src;
int dest;
public Link(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
}
public int compareTo(Object o){
Link e = (Link)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
}
| 28,219 | 30.636771 | 138 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/CommNode.java | package org.dzhuang.dynamic.OtherAlgorithms;
public class CommNode {
public int id;
public int type = NodeType.NODE;
	public int pId = -1; //the parent id of the node (if any); root nodes have pId = -1
public CommNode(int id, int type, int pId){
this.id = id;
this.type = type;
this.pId = pId;
}
}
| 327 | 18.294118 | 95 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/GreMod.java | package org.dzhuang.dynamic.OtherAlgorithms;
import java.util.*;
import java.io.*;
import java.text.*;
import org.dzhuang.dynamic.util.FileUtil;
import org.dzhuang.dynamic.util.Parameter;
import org.dzhuang.dynamic.util.Utility;
public class GreMod{
Community comm;
public static void main(String args[]) throws Exception{
String dataset = "arXiv";
String dataset1 = dataset + "/" + dataset;
String graphPath = Parameter.ROOT_PATH + "/" + dataset1 + "_graph_0.txt";
String commPath = FileUtil.replaceFileName(graphPath, dataset + "_comm_0.txt");
GreMod greMod = new GreMod();
greMod.initialize(graphPath, commPath);
System.out.println("Modularity:" + greMod.comm.modularity());
//greMod.increase(Parameter.ROOT_PATH + "/" + dataset1 + "_inc.txt", 200, Parameter.ROOT_PATH + "/" + dataset1 + "_comm_inc.txt");
}
public GreMod(){
//Do nothing
}
public void initialize(String graphPath, String commPath) throws Exception{
comm = new Community(graphPath, commPath);
}
public HashMap increase(String incPath, int dataPoints, String commOutPath) throws Exception{
long t0_1 = System.currentTimeMillis();
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = comm.nodeDict;
ArrayList<Double> modList = new ArrayList();
ArrayList<Long> timeList = new ArrayList();
ArrayList<Integer> commList = new ArrayList();
long t0_2 = System.currentTimeMillis();
for(int i = 0; i < dataPoints; i++){
long t1 = System.currentTimeMillis();
int opType = 0;
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (i+1)));
if(!incFile.exists())
break;
BufferedReader br = new BufferedReader(new FileReader(incFile));
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
String from = token.nextToken();
String to = token.nextToken();
double w = 1.0;
//if both nodes exist in the graph
if(nodeDict.containsKey(from) && nodeDict.containsKey(to)){
int src = nodeDict.get(from);
int dest = nodeDict.get(to);
if(comm.g.linkMap.containsKey(new Link(src, dest))){ //if this link already exists, ignore it
str = br.readLine();
continue;
}
int srcComm = comm.n2c.get(src);
int destComm = comm.n2c.get(dest);
//1. if the two nodes are in the same community, keep the community structure unchanged
if(srcComm == destComm){
comm.in.set(srcComm, comm.in.get(srcComm) + 2*w);
comm.tot.set(srcComm, comm.tot.get(srcComm) + 2*w);
opType = 1;
					}else{//2. if the two nodes are in different communities, consider merging them together
//get the neighborhood communities of srcComm and destComm
TreeMap<Integer, Double> srcMap = comm.commMatrix.get(srcComm);
TreeMap<Integer, Double> destMap = comm.commMatrix.get(destComm);
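						//heuristic merge test (a conservative reading of the modularity gain): merge the two
						//communities only if the new edge's weight times the updated total weight (m2+2w) exceeds
						//twice the product of the two communities' updated total degrees; whenever this holds,
						//the exact modularity gain of the merge (with Q = sum_c [in_c/m2 - (tot_c/m2)^2]) is positive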
if(w * (comm.g.m2 + 2*w) > 2 * (comm.tot.get(srcComm) + w) * (comm.tot.get(destComm) + w)){ //2.1 merge two communities
for(int j = 0; j < comm.c2n.get(destComm).size(); j++){ //move each node from community destComm to srcComm
int nodeToBeMoved = comm.c2n.get(destComm).get(j);
comm.c2n.get(srcComm).add(nodeToBeMoved);
comm.n2c.set(nodeToBeMoved, srcComm);
}
comm.c2n.put(destComm, new ArrayList()); //clear the node IDs from community destComm
//shifting the neighborhood connections from destComm to srcComm
Iterator<Integer> it = destMap.keySet().iterator();
while(it.hasNext()){
int neighborComm = it.next(); //for each neighborhood communities of destComm, connect them to srcComm
double commWeight = destMap.get(neighborComm);
if(srcComm != neighborComm){
TreeMap<Integer, Double> neighborMap = comm.commMatrix.get(neighborComm);
if(!srcMap.containsKey(neighborComm)){
srcMap.put(neighborComm, commWeight);
neighborMap.put(srcComm, commWeight);
}
else{
srcMap.put(neighborComm, srcMap.get(neighborComm) + commWeight);
neighborMap.put(srcComm, neighborMap.get(srcComm) + commWeight);
}
neighborMap.remove(destComm);
}
else{
comm.in.set(srcComm, comm.in.get(srcComm) + 2*commWeight);
}
}
srcMap.remove(destComm);
comm.commMatrix.get(destComm).clear(); //remove community destComm
comm.in.set(srcComm, comm.in.get(srcComm) + comm.in.get(destComm) + 2*w); //update the total inner community weight
comm.tot.set(srcComm, comm.tot.get(srcComm) + comm.tot.get(destComm) + 2*w); //update the total community weight
comm.in.set(destComm, 0.0); //remove community destComm by setting its in and tot value to be zero
comm.tot.set(destComm, 0.0);
opType = 2;
}
else{ //2.2 keep the community structure unchanged
comm.tot.set(srcComm, comm.tot.get(srcComm) + w);
comm.tot.set(destComm, comm.tot.get(destComm) + w);
//update the connections between srcComm and destComm
if(!srcMap.containsKey(destComm)){
srcMap.put(destComm, w);
destMap.put(srcComm, w);
}
else{
srcMap.put(destComm, srcMap.get(destComm) + w);
destMap.put(srcComm, destMap.get(srcComm) + w);
}
opType = 3;
}
}
}
else if(nodeDict.containsKey(from) || nodeDict.containsKey(to)){ //3. if one of the nodes is a new node
int src;
					int dest = nodeDict.size(); // let dest be the new node ID
if(nodeDict.containsKey(from)){
src = nodeDict.get(from);
nodeDict.put(to, dest);
}
else{
src = nodeDict.get(to);
nodeDict.put(from, dest);
}
//assign the new node to the community which node src belongs to
int srcComm = comm.n2c.get(src);
comm.n2c.add(srcComm);
comm.c2n.get(srcComm).add(dest);
comm.in.set(srcComm, comm.in.get(srcComm) + 2*w);
comm.tot.set(srcComm, comm.tot.get(srcComm)+ 2*w);
comm.g.matrix.add(new ArrayList()); //initialize the neighbor list of node dest
comm.g.nodes++;
opType = 4;
}
				else{ // 4. both nodes are new nodes
int src = nodeDict.size(); //assign IDs to the new nodes
int dest = src+1;
nodeDict.put(from, src); //put the nodes into nodeDict
nodeDict.put(to, dest);
int commId = comm.in.size(); //create a new community for the two nodes
ArrayList<Integer> nodeList = new ArrayList(); //the list containing the node IDs for the community
nodeList.add(src); //add the two nodes to the new community
nodeList.add(dest);
comm.c2n.put(commId, nodeList); //set the node list for the community
comm.n2c.add(commId); // assign the community label to the two nodes
comm.n2c.add(commId);
comm.in.add(2 * w);
comm.tot.add(2 * w);
comm.commMatrix.add(new TreeMap());
//initialize the neighbor list of the two nodes
comm.g.matrix.add(new ArrayList());
comm.g.matrix.add(new ArrayList());
comm.g.nodes += 2;
opType = 5;
}
//update the graph adjacent matrix
int src = nodeDict.get(from);
int dest = nodeDict.get(to);
comm.g.m2 += 2*w;
comm.g.matrix.get(src).add(dest);
comm.g.matrix.get(dest).add(src);
comm.g.linkMap.put(new Link(src, dest), w);
str = br.readLine();
}
br.close();
modList.add(new Double(Parameter.df.format(comm.modularity())));
commList.add(comm.communities());
comm.exportCommunity(FileUtil.extendFileName(commOutPath, "_" + (i+1)));
long t2 = System.currentTimeMillis();
long time = t2-t1+t0_2-t0_1;
System.out.println("Time pint: " + (i+1) + ": modularity: " + comm.modularity() + " time: " + time + " seconds");
timeList.add(time);
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", commList);
return resultMap;
}
public double modularity(){
return comm.modularity();
}
class Community{
Graph g;
ArrayList<Integer> n2c; // the community ID for each node
TreeMap<Integer, ArrayList<Integer>> c2n; // the nodes for each community
ArrayList<Double> in, tot; //the inner and total weight of the edge for each community
HashMap<String, Integer> nodeDict; //mapping the node label to integer ID
ArrayList<TreeMap<Integer, Double>> commMatrix; //the adjacent matrix of the community structure
public Community(String graphPath, String commPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
g = new Graph(graphPath);
readCommunity(commPath);
}
public int communities(){
int commNum = 0;
if(c2n == null)
return 0;
for(int i = 0; i < c2n.size(); i++){
if(c2n.get(i).size() > 0)
commNum++;
}
return commNum;
}
public boolean hasSelfloop(){
for(int i = 0; i < commMatrix.size(); i++){
TreeMap<Integer, Double> map = commMatrix.get(i);
Iterator<Integer> it = map.keySet().iterator();
while(it.hasNext()){
int commId = it.next();
if(commId == i){
System.out.println(commId);
return true;
}
}
}
return false;
}
//Read the community structure
public void readCommunity(String commPath) throws Exception{
n2c = new ArrayList(g.nodes);
c2n = new TreeMap();
for(int i = 0; i < g.nodes; i++)
n2c.add(0);
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
ArrayList<Integer> nodeList = new ArrayList();
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = nodeDict.get(token.nextToken());
nodeList.add(nodeId);
n2c.set(nodeId, commId);
}
c2n.put(commId, nodeList);
commId++;
str = br.readLine();
}
br.close();
in = new ArrayList(commId);
tot = new ArrayList(commId);
commMatrix = new ArrayList();
for(int i = 0; i < commId; i++){
in.add(i, 0.0);
tot.add(i, 0.0);
commMatrix.add(new TreeMap());
}
for(int i = 0; i < g.matrix.size(); i++){
ArrayList<Integer> neighborList = g.matrix.get(i);
int src = i;
int srcComm = n2c.get(src);
for(int j = 0; j < neighborList.size(); j++){
int dest = neighborList.get(j);
int destComm = n2c.get(dest);
double weight = g.linkMap.get(new Link(src, dest));
if(srcComm == destComm)
in.set(srcComm, in.get(srcComm) + weight);
tot.set(srcComm, tot.get(srcComm) + weight);
if(srcComm != destComm){
TreeMap<Integer, Double> srcMap = commMatrix.get(srcComm);
TreeMap<Integer, Double> destMap = commMatrix.get(destComm);
if(!srcMap.containsKey(destComm)){
srcMap.put(destComm, weight);
destMap.put(srcComm, weight);
}
else{
srcMap.put(destComm, srcMap.get(destComm) + weight);
destMap.put(srcComm, destMap.get(srcComm) + weight);
}
}
}
}
}
//write the community structure to file, each line contains the node IDs of a community
public void exportCommunity(String commPath) throws Exception{
HashMap<Integer, String> revNodeDict = Utility.reverseDict(nodeDict);
TreeMap<Integer, ArrayList<Integer>> c2n = new TreeMap();
for(int i = 0; i < n2c.size(); i++){
int commId = n2c.get(i);
if(!c2n.containsKey(commId))
c2n.put(commId, new ArrayList());
c2n.get(commId).add(i);
}
//Write file
BufferedWriter bw = new BufferedWriter(new FileWriter(commPath));
Iterator<Integer> it = c2n.keySet().iterator();
while(it.hasNext()){
int commId = it.next();
ArrayList<Integer> nodeList = c2n.get(commId);
for(int i = 0; i < nodeList.size(); i++){
String nodeLabel = revNodeDict.get(nodeList.get(i));
bw.write(nodeLabel + "\t");
}
bw.write("\r\n");
}
bw.close();
}
//Compute the modularity value
public double modularity(){
double Q = 0;
for(int i = 0; i < in.size(); i++){
Q += in.get(i) / g.m2 - Math.pow(tot.get(i) / g.m2, 2);
}
return Q;
}
}
class Graph{
int nodes;
double m2;
ArrayList<ArrayList<Integer>> matrix; // the adjacent matrix
TreeMap<Link, Double> linkMap;
public Graph(String graphPath) throws Exception{
HashMap<String, Integer> nodeDict = FileUtil.getDict(graphPath);
nodes = nodeDict.size();
matrix = new ArrayList(nodes);
for(int i = 0; i < nodes; i++){
matrix.add(new ArrayList());
}
m2 = 0;
BufferedReader br = new BufferedReader(new FileReader(graphPath));
linkMap = new TreeMap();
String str = br.readLine();
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
matrix.get(src).add(dest);
linkMap.put(new Link(src, dest), weight);
m2 += weight;
if(src != dest){
matrix.get(dest).add(src);
m2 += weight;
}
str = br.readLine();
}
br.close();
}
}
class Link implements Comparable{
int src;
int dest;
public Link(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
}
public int compareTo(Object o){
Link e = (Link)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
}
| 13,578 | 32.528395 | 132 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/Louvain.java | /**
 * The Louvain community detection algorithm
*/
package org.dzhuang.dynamic.OtherAlgorithms;
import java.io.*;
import java.util.*;
import org.dzhuang.dynamic.util.FileUtil;
import org.dzhuang.dynamic.util.Parameter;
import org.dzhuang.dynamic.util.Utility;
public class Louvain {
ArrayList<Double> neighWeight;
ArrayList<Integer> neighPos;
int neighLast;
public Graph g;
public int size;
ArrayList<Integer> n2c;
ArrayList<Double> in, tot;
double minModularity;
public double runTime;
public static void main(String args[]) throws Exception{
String data = "facebook";
String data1 = data + "/" + data;
String graphPath = Parameter.ROOT_PATH + "/" + data1 + "_graph_0.txt";
String commPath = Parameter.ROOT_PATH + "/" + data1 + "_comm_0.txt";
Louvain louvain = new Louvain();
//louvain.run(Parameter.ROOT_PATH + "/enron/enron_graph_20.txt", 3, 0.001);
louvain.runAndExport(graphPath, 0.0001, commPath);
}
public Louvain(){
}
public Louvain(Graph g, double minModularity) throws Exception{
this.g = g;
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for(int i = 0; i < size; i++){
neighWeight.add(new Double(-1.0));
neighPos.add(new Integer(-1));
}
neighLast = 0;
n2c.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
//initialize
for(int i = 0; i < size; i++){
n2c.add(i);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
this.minModularity = minModularity;
}
public Louvain run(String filePath, double precision) throws Exception{
System.out.println("Begin");
Graph g = new Graph(filePath);
Louvain com = new Louvain(g, precision);
boolean improvement = true;
double mod = com.modularity();
double newMod;
int level = 0;
long t1 = System.currentTimeMillis();
do{
System.out.println("Level:" + level + "\tNodes:" + com.g.nbNodes +
"\tEdges:" + com.g.nbLinks + "\t links.size():" + com.g.links.size() + "\tTotalWeight:" + com.g.totalWeight);
level++;
improvement = com.oneLevel();
newMod = com.modularity();
g = com.partition2Graph();
com = new Louvain(g, precision);
System.out.println("mod increased from " + mod + " to " + newMod);
mod = newMod;
}while(improvement);
long t2 = System.currentTimeMillis();
double time = (double)(t2 - t1)/1000;
com.runTime = time;
System.out.println("Time:" + time + " seconds");
System.out.println("Succeed");
return com;
}
public Louvain runWithInitialCommunity(String graphPath, String comPath, double precision) throws Exception{
System.out.println("Begin");
Graph g = new Graph(graphPath);
Louvain com = new Louvain(g, precision);
com.readCommunity(comPath);
boolean improvement = true;
double mod = com.modularity();
double newMod;
int level = 0;
long t1 = System.currentTimeMillis();
do{
System.out.println("Level:" + level + "\tCommunities: " + com.nonEmptyCommunities() + "\tNodes:" + com.g.nbNodes +
"\tEdges:" + com.g.nbLinks + "\t links.size():" + com.g.links.size() + "\tTotalWeight:" + com.g.totalWeight);
level++;
improvement = com.oneLevel();
newMod = com.modularity();
g = com.partition2Graph();
com = new Louvain(g, precision);
System.out.println("mod increased from " + mod + " to " + newMod);
mod = newMod;
}while(improvement);
long t2 = System.currentTimeMillis();
double time = (double)(t2 - t1)/1000;
com.runTime = time;
System.out.println("Time:" + time + " seconds");
System.out.println("Succeed");
return com;
}
public Louvain runAndExport(String graphPath, double precision, String commPath) throws Exception{
System.out.println("Begin");
Graph g = new Graph(graphPath);
Louvain com = new Louvain(g, precision);
boolean improvement = true;
double mod = com.modularity();
double newMod;
int level = 0;
long t1 = System.currentTimeMillis();
int cursor = 0;
HashMap<Integer, Integer> comMap = new HashMap();
HashMap<Integer, CommNode> commStruc = new HashMap();
do{
System.out.println("Level:" + level + "\tNodes:" + com.g.nbNodes +
"\tEdges:" + com.g.nbLinks + "\t links.size():" + com.g.links.size() + "\tTotalWeight:" + com.g.totalWeight);
level++;
improvement = com.oneLevel();
newMod = com.modularity();
cursor += g.nbNodes;
if(improvement)
com.exportCommunity(commStruc, comMap, cursor, false);
else
com.exportCommunity(commStruc, comMap, cursor, true);
g = com.partition2Graph();
com = new Louvain(g, precision);
System.out.println("mod increased from " + mod + " to " + newMod);
mod = newMod;
}while(improvement);
long t2 = System.currentTimeMillis();
double time = (double)(t2 - t1)/1000;
com.runTime = time;
System.out.println("Time:" + time + " seconds");
writeCommunity(graphPath, commStruc, commPath);
return com;
}
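	/**
	 * Compute the Newman modularity of the current partition:
	 * Q = sum over communities c of [ in_c / m2 - (tot_c / m2)^2 ],
	 * where m2 (g.totalWeight) is twice the total edge weight of the graph.
	 */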
public double modularity(){
double q = 0;
double m2 = (double)g.totalWeight;
for(int i = 0; i < size; i++){
if(tot.get(i) > 0)
q += in.get(i).doubleValue()/m2 - Math.pow(tot.get(i).doubleValue()/m2, 2);
}
return q;
}
	public double modularityGain(int node, int comm, double dnodecomm, double wDegree){
		double totc = tot.get(comm).doubleValue(); //the sum of the weighted degrees of the nodes in community comm
		double degc = wDegree; //the weighted degree of the node
		double m2 = g.totalWeight; //twice the total edge weight of the network
		double dnc = dnodecomm; //the total weight of the links between the node and community comm
		return (dnc - totc*degc/m2);
	}
public void remove(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2*dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2*dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
// generate the neighborhood communities of node
// this operation will change list neighWeight, neighPos
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
Pair<ArrayList<Integer>, ArrayList<Double>> p = g.neighbors(node);
int deg = g.nbNeighbors(node);
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
int neigh = p.first.get(i);
int neighComm = n2c.get(neigh);
double neighW = p.second.get(i);
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
}
}
}
	//compress each community into a single node and build the graph g2 for the next level
public Graph partition2Graph(){
ArrayList<Integer> renumber = new ArrayList();
renumber.ensureCapacity(size);
for(int i = 0; i < size; i++)
renumber.add(new Integer(-1));
for(int node = 0; node < size; node++)
renumber.set(n2c.get(node), renumber.get(n2c.get(node)) + 1);
int newIndex = 0;
for(int i = 0; i < size; i++)
if(renumber.get(i) != -1)
renumber.set(i, newIndex++);
ArrayList<ArrayList<Integer>> commNodes = new ArrayList();
for(int i = 0; i < newIndex; i++)
commNodes.add(new ArrayList());
for(int node = 0; node < size; node++){
commNodes.get(renumber.get(n2c.get(node))).add(node);
}
Graph g2 = new Graph();
g2.nbNodes = commNodes.size();
g2.degrees.ensureCapacity(commNodes.size());
for(int i = 0; i < commNodes.size(); i++)
g2.degrees.add(new Integer(-1));
int commDeg = commNodes.size();
for(int comm = 0; comm < commDeg; comm++){
HashMap<Integer, Double> m = new HashMap();
int commSize = commNodes.get(comm).size();
for(int node = 0; node < commSize; node++){
Pair<ArrayList<Integer>, ArrayList<Double>> p = g.neighbors(commNodes.get(comm).get(node));
int deg = g.nbNeighbors(commNodes.get(comm).get(node));
for(int i = 0; i < deg; i++){
int neigh = p.first.get(i);
int neighComm = renumber.get(n2c.get(neigh));
double neighWeight = p.second.get(i);
if(!m.containsKey(new Integer(neighComm))){
m.put(neighComm, neighWeight);
}else{
m.put(neighComm, m.get(neighComm) + neighWeight);
}
}
}
g2.degrees.set(comm, (comm==0)?m.size():g2.degrees.get(comm-1)+m.size());
g2.nbLinks += m.size();
Iterator ite = m.entrySet().iterator();
while(ite.hasNext()){
Map.Entry<Integer, Double> entry = (Map.Entry)ite.next();
g2.totalWeight += entry.getValue();
g2.links.add(entry.getKey());
g2.weights.add(entry.getValue());
}
}
return g2;
}
// carry out iteration on one level
public boolean oneLevel(){
boolean improvement = false;
int nbMoves;
double newMod = modularity();
double curMod = newMod;
ArrayList<Integer> randomOrder = new ArrayList();
randomOrder.ensureCapacity(size);
for(int i = 0; i < size; i++){
randomOrder.add(new Integer(i));
}
Random rand = new Random();
for(int i = 0; i < size-1; i++){
int randPos = Math.abs(rand.nextInt()) % (size-i) + i;
int tmp = randomOrder.get(i);
randomOrder.set(i, randomOrder.get(randPos).intValue());
randomOrder.set(randPos, tmp);
}
do{
curMod = newMod;
nbMoves = 0;
			//for each node: remove it from its current community and insert it into the neighbor community that yields the largest modularity gain
for(int nodeTmp = 0; nodeTmp < size; nodeTmp++){
int node = randomOrder.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for(int i = 0; i < neighLast; i++){
double increase = modularityGain(node, neighPos.get(i), neighWeight.get(neighPos.get(i)), wDegree);
if(increase > bestIncrease){
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(neighPos.get(i));
bestIncrease = increase;
}
}
insert(node, bestComm, bestNbLinks);
if(bestComm != nodeComm)
nbMoves++;
}
newMod = modularity();
if(nbMoves > 0 && newMod-curMod > minModularity)
improvement = true;
}while(nbMoves > 0 && newMod - curMod > minModularity);
return improvement;
}
/**
* save the hierarchical community structure
* @param structure - the hierarchical community structure
	 * @param comMap - the mapping of node ids between two iterations
* @param cursor - the maximum id of current node
* @param isTop - whether the node is the root
* @throws Exception -
*/
public void exportCommunity(HashMap<Integer, CommNode> structure, HashMap<Integer, Integer> comMap,
int cursor, boolean isTop) throws Exception{
ArrayList<Integer> renumber = new ArrayList();
renumber.ensureCapacity(size);
for(int i = 0; i < size; i++)
renumber.add(new Integer(-1));
for(int node = 0; node < size; node++)
renumber.set(n2c.get(node), renumber.get(n2c.get(node)) + 1);
int newIndex = 0;
for(int i = 0; i < size; i++)
if(renumber.get(i) != -1)
renumber.set(i, newIndex++);
if(comMap.isEmpty()){
for(int node = 0; node < size; node++){
int parentId = cursor + renumber.get(n2c.get(node));
CommNode comm = new CommNode(node, NodeType.NODE, parentId);
structure.put(node, comm);
comMap.put(parentId-cursor, parentId);
}
}else if(!isTop){
HashMap<Integer, Integer> tempCommMap = new HashMap();
for(int node = 0; node < size; node++){
int nodeId = comMap.get(node);
//System.out.println(nodeId);
int parentId = cursor + renumber.get(n2c.get(node));
CommNode comm = new CommNode(nodeId, NodeType.COMM, parentId);
structure.put(nodeId, comm);
comMap.remove(node);
tempCommMap.put(parentId-cursor, parentId);
}
comMap.clear();
comMap.putAll(tempCommMap);
}else{
for(int node = 0; node < size; node++){
int nodeId = comMap.get(node);
CommNode comm = new CommNode(nodeId, NodeType.COMM, -1);
structure.put(nodeId, comm);
}
comMap.clear();
}
}
public int nonEmptyCommunities(){
TreeSet<Integer> comSet = new TreeSet();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
comSet.add(com);
}
return comSet.size();
}
public void readCommunity(String commPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = g.nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
}
commId++;
str = br.readLine();
}
br.close();
// update the tot and in of the community structure
for(int i = 0; i < size; i++){
tot.set(i, 0.0);
in.set(i, 0.0);
}
for(int i = 0; i < g.nbNodes; i++){
int srcCom = n2c.get(i);
int start = 0;
if(i > 0)
start = g.degrees.get(i-1);
for(int j = start; j < g.degrees.get(i); j++){
int dest = g.links.get(j);
int destCom = n2c.get(dest);
double w = g.weights.get(j);
if(srcCom == destCom){ //if i and dest are in the same community
in.set(srcCom, in.get(srcCom) + w); //update in value of this community
}
tot.set(srcCom, tot.get(srcCom) + w); //update the tot value of community C(i)
}
}
}
public static void writeCommunity(String graphPath, HashMap<Integer, CommNode> commStruc, String commPath) throws Exception{
HashMap<String, Integer> nodeDict = FileUtil.getDict(graphPath);
HashMap<Integer, String> revDict = Utility.reverseDict(nodeDict);
HashMap<Integer, ArrayList<Integer>> commToNode = new HashMap();
BufferedWriter bw = new BufferedWriter(new FileWriter(commPath));
Iterator<Integer> it = commStruc.keySet().iterator();
while(it.hasNext()){
int nodeId = it.next();
if(commStruc.get(nodeId).type == NodeType.NODE){
int pId = nodeId;
while(commStruc.get(pId).pId != -1){
pId = commStruc.get(pId).pId;
}
if(!commToNode.containsKey(pId)){
commToNode.put(pId, new ArrayList());
}
commToNode.get(pId).add(nodeId);
}
}
it = commToNode.keySet().iterator();
while(it.hasNext()){
int commId = it.next();
ArrayList nodeList = commToNode.get(commId);
for(int i = 0; i < nodeList.size()-1; i++){
bw.write(revDict.get(nodeList.get(i)) + "\t");
}
bw.write(revDict.get(nodeList.get(nodeList.size()-1)) + "\r\n");
}
bw.close();
}
/**
	 * Inner class representing the graph
* @author shangjiaxing
*
*/
class Graph{
HashMap<String, Integer> nodeDict;
int nbNodes; //number of nodes
int nbLinks; //number of edges;
double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
ArrayList<Integer> degrees; //the cumulative degree of each node
		ArrayList<Integer> links; //the neighbor IDs for each node; together with degrees we can easily get the neighbors of any node, e.g. for i > 0 the first neighbor ID of node i is links[degrees[i-1]] (and links[0] for node 0)
ArrayList<Double> weights; //the weight of each link
ArrayList<ArrayList<Pair<Integer, Double>>> topology; //The matrix of the graph, the neighbors of i is denoted as topology.get(i)
public Graph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
degrees = new ArrayList();
links = new ArrayList();
weights = new ArrayList();
topology = new ArrayList();
}
public Graph(String graphPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nbNodes = nodeDict.size();
degrees = new ArrayList();
links = new ArrayList();
weights = new ArrayList();
topology = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
topology.ensureCapacity(nbNodes);
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
nbLinks = 0;
totalWeight = 0;
String str = br.readLine().trim();
while(str != null && !str.equals("")){
StringTokenizer token = new StringTokenizer(str, "\t");
int src = nodeDict.get(token.nextToken());
int dest = nodeDict.get(token.nextToken());
double weight = new Double(token.nextToken());
topology.get(src).add(new Pair(dest, weight));
nbLinks++;
if(src != dest){
topology.get(dest).add(new Pair(src, weight));
nbLinks++;
}
str = br.readLine();
}
br.close();
links.ensureCapacity(nbLinks);
weights.ensureCapacity(nbLinks);
for(int i = 0; i < nbNodes; i++){
if(i == 0){
degrees.add(topology.get(i).size());
}else{
degrees.add(degrees.get(i-1).intValue() + topology.get(i).size());
}
for(int j = 0; j < topology.get(i).size(); j++){
Pair<Integer, Double> pair = topology.get(i).get(j);
links.add(pair.first);
weights.add(pair.second);
totalWeight += pair.second;
}
}
topology.clear();
topology = null;
}
public double weightedDegree(int node){
double wDegree = 0;
Pair<ArrayList<Integer>, ArrayList<Double>> p = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++)
wDegree += p.second.get(i);
return wDegree;
}
public int nbNeighbors(int node){
if(node == 0){
return degrees.get(0);
}else{
return degrees.get(node) - degrees.get(node-1);
}
}
public double nbSelfLoops(int node){
Pair<ArrayList<Integer>, ArrayList<Double>> p = neighbors(node);
for(int i = 0; i < nbNeighbors(node); i++){
if(p.first.get(i).intValue() == node)
return p.second.get(i);
}
return 0;
}
public Pair<ArrayList<Integer>, ArrayList<Double>> neighbors(int node){
ArrayList<Integer> firstList = new ArrayList();
ArrayList<Double> secondList = new ArrayList();
if(node == 0){
for(int i = 0; i < degrees.get(0).intValue(); i++){
firstList.add(links.get(i));
secondList.add(weights.get(i));
}
return new Pair(firstList, secondList);
}
else{
for(int i = degrees.get(node-1); i < degrees.get(node); i++){
firstList.add(links.get(i));
secondList.add(weights.get(i));
}
return new Pair(firstList, secondList);
}
}
public int getNbNodes(){
return nbNodes;
}
public int getNbLinks(){
return nbLinks;
}
public ArrayList<Double> getWeights(){
return weights;
}
public ArrayList<Integer> getLinks(){
return links;
}
public ArrayList<ArrayList<Pair<Integer, Double>>> getTopology(){
return topology;
}
public double getTotalWeight(){
return totalWeight;
}
}
/**
	 * Inner helper class used by the algorithm
* @param <T1>
* @param <T2>
*/
class Pair<T1, T2>{
public T1 first;
public T2 second;
public Pair(T1 first, T2 second){
this.first = first;
this.second = second;
}
}
}
| 22,749 | 34.658307 | 193 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/OtherAlgorithms/LearnIncSvm.java | package org.dzhuang.dynamic.OtherAlgorithms;
import java.util.*;
import java.io.*;
import java.text.*;
import libsvm.*;
import org.dzhuang.dynamic.comm.NMI;
import org.dzhuang.dynamic.graph.*;
import org.dzhuang.dynamic.util.*;
import toolbox.*;
import toolbox.lr.*;
import toolbox.svm.*;
public class LearnIncSvm {
svm_model model; //the node SVM classification model
ArrayList<Double> neighWeight; //the weight from node u to its neighbor communities
ArrayList<Integer> neighPos; //the index of node u's neighbor communities
double neighConnMax; //the maximum connections from u to its neighbor communities
int neighLast;
public Graph g; //the graph
public ComGraph cg; // The community graph (each node represents a community)
public int size; //the number of communities, during iterations, there may be empty communities
ArrayList<Integer> n2c; // the belonging ship from nodes to communities
ArrayList<Double> n2cIn; //the connections from each node to its current community
ArrayList<Double> in, tot; //the inner and total degree of the communities
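	//heuristic cap used in refineCom(): communities larger than this size are not considered as merge sources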
public static double MAX_MERGE_SIZE = 8;
//The empty constructor
public LearnIncSvm(){}
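	/*
	 * Typical usage (a minimal sketch based on the public methods below; the file paths and the
	 * pre-trained libsvm node model are placeholders chosen by the caller, not fixed by this class):
	 *
	 *   LearnIncSvm alg = new LearnIncSvm();
	 *   svm_model model = svm.svm_load_model("node_model.txt"); // hypothetical model path
	 *   alg.init("graph_0.txt", "comm_0.txt", model);           // initial graph and communities
	 *   alg.increase("inc.txt", 10, "comm_out.txt");            // processes inc_1.txt ... inc_10.txt
	 */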
	/**
	 * Initialize the heuristic incremental algorithm
	 * @param graphPath - the path of the initial graph file
	 * @param comPath - the path of the initial community file
	 * @throws Exception
	 */
public void init2(String graphPath, String comPath) throws Exception{
readGraph(graphPath);
readCommunity(comPath);
initComGraph();
}
public void init(String graphPath, String comPath, svm_model model) throws Exception{
this.model = model;
readGraph(graphPath);
readCommunity(comPath);
initComGraph();
}
public void init(String graphPath, String comPath) throws Exception{
System.out.println("Initializing...");
readGraph(graphPath);
System.out.println("Graph read! Nodes: " + g.nbNodes + " Edges: " + g.totalWeight/2);
readCommunity(comPath);
}
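	/**
	 * Process the incremental data files derived from incPath (incPath_1 ... up to maxPoints files):
	 * each file is read in monthly batches, the community structure is updated batch by batch, and
	 * the resulting communities are written to commOutPath_i. Returns a map with the modularity,
	 * running-time and community-count lists under the keys "modList", "timeList" and "comList".
	 */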
public HashMap increase(String incPath, int maxPoints, String commOutPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
				start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into deltaG
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
this.writeCommunity(FileUtil.extendFileName(commOutPath, "_" + (point+1)));
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
public HashMap increaseNoComOutput(String incPath, int maxPoints, String dataSet) throws Exception{
long t0_1 = System.currentTimeMillis();
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Double> modList = new ArrayList();
ArrayList<Long> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
long t0_2 = System.currentTimeMillis();
for(int point = 0; point < maxPoints; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
//TODO
this.model = svm.svm_load_model("data2/"+dataSet+"/"+dataSet+"_model_SVM_"+point+".txt");
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
				start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into deltaG
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
double mod = modularity();
modList.add(mod);
int communities = nonEmptyCommunities();
comList.add(communities);
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
this.writeCommunity("data2/"+dataSet+"/"+dataSet+"_LearnIncSVM_community_"+(point+1)+".txt");
FileUtil.deleteFile(tmpComPath);
long t2= System.currentTimeMillis();
long time = t2-t1+t0_2-t0_1;
timeList.add(time);
System.out.println("Q" + (point+1) + ": " + mod + " Time: " + time + " Communities: " + communities);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
return resultMap;
}
public HashMap increasePeriod(String incPath, int periodMonth, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
ArrayList<Data> dataList = new ArrayList();
for(int point = 0; point < 10000; point++){
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists()){
if(dataList.size() > 0){
TreeMap<Link, Double> deltaG = new TreeMap();
readBatch(deltaG, dataList, 0, periodMonth);
dataList = new ArrayList();
long t1 = System.currentTimeMillis();
updateCommunityStructure(deltaG);
long t2 = System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
}
break;
}
dataList.addAll(FileUtil.readData(incFile.getAbsolutePath()));
if((point+1) % periodMonth == 0){
TreeMap<Link, Double> deltaG = new TreeMap();
readBatch(deltaG, dataList, 0, periodMonth);
dataList = new ArrayList();
long t1 = System.currentTimeMillis();
updateCommunityStructure(deltaG);
long t2 = System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public HashMap increaseInitial(String incPath, int initPoint, String baseComPath) throws Exception{
HashMap resultMap = new HashMap();
HashMap<String, Integer> nodeDict = g.nodeDict;
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> timeList = new ArrayList();
ArrayList<Integer> comList = new ArrayList();
ArrayList<Float> nmiList = new ArrayList();
for(int point = initPoint; point < 10000; point++){
long t1 = System.currentTimeMillis();
File incFile = new File(FileUtil.extendFileName(incPath, "_" + (point+1)));
if(!incFile.exists())
break;
ArrayList<Data> dataList = FileUtil.readData(incFile.getAbsolutePath());
int start = 0;
while(start < dataList.size()){
TreeMap<Link, Double> deltaG = new TreeMap();
				start = readNextBatch(deltaG, dataList, start, FileUtil.BY_MONTH); //read the next batch of incremental data into deltaG
if(deltaG.size() == 0) // if there is no change
continue;
updateCommunityStructure(deltaG);
}
long t2= System.currentTimeMillis();
double mod = modularity();
float time = (float)(t2-t1)/1000;
int communities = nonEmptyCommunities();
String realComPath = FileUtil.extendFileName(baseComPath, "_" + (point+1));
String tmpComPath = "comm.tmp";
this.writeCommunity(tmpComPath);
float nmi = (float)NMI.getNMI(realComPath, tmpComPath);
modList.add((float)mod);
timeList.add(time);
comList.add(communities);
nmiList.add(nmi);
FileUtil.deleteFile(tmpComPath);
System.out.println("Q" + (point+1) + ": " + (float)mod + " Time: " + time + " Communities: " + communities + " NMI: " + nmi);
//outputCommunityStatistics();
}
resultMap.put("modList", modList);
resultMap.put("timeList", timeList);
resultMap.put("comList", comList);
resultMap.put("nmiList", nmiList);
return resultMap;
}
public static Param readParam(String paramPath, int paramNum) throws Exception{
Param param = new Param(paramNum, 0);
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = str.substring(str.indexOf('=')+1);
StringTokenizer token = new StringTokenizer(str, "[, ];");
for(int i = 0; i < paramNum;i++){
param.data[i] = new Double(token.nextToken());
}
br.close();
return param;
}
public static double readPrecision(String paramPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = str.substring(str.indexOf('=')+1, str.lastIndexOf(';'));
double precision = new Double(str);
br.close();
return precision;
}
public static double readRecall(String paramPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(paramPath));
String str = br.readLine();
str = br.readLine();
str = str.substring(str.indexOf('=')+1, str.lastIndexOf(';'));
double recall = new Double(str);
br.close();
return recall;
}
public void readGraph(String graphPath) throws Exception{
this.g = new Graph(graphPath);
neighWeight = new ArrayList();
neighPos = new ArrayList();
n2c = new ArrayList();
n2cIn = new ArrayList();
in = new ArrayList();
tot = new ArrayList();
size = g.nbNodes;
neighWeight.ensureCapacity(size);
neighPos.ensureCapacity(size);
for(int i = 0; i < size; i++){
neighWeight.add(new Double(-1.0));
neighPos.add(new Integer(-1));
}
neighLast = 0;
n2c.ensureCapacity(size);
n2cIn.ensureCapacity(size);
in.ensureCapacity(size);
tot.ensureCapacity(size);
//initialize
for(int i = 0; i < size; i++){
n2c.add(i);
n2cIn.add(0.0);
tot.add(g.weightedDegree(i));
in.add(g.nbSelfLoops(i));
}
}
public void readCommunity(String commPath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(commPath));
String str = br.readLine();
int commId = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
int nodeId = g.nodeDict.get(token.nextToken());
n2c.set(nodeId, commId);
}
commId++;
str = br.readLine();
}
br.close();
// update the tot and in of the community structure
for(int i = 0; i < size; i++){
tot.set(i, 0.0);
in.set(i, 0.0);
}
for(int i = 0; i < g.nbNodes; i++){
int srcCom = n2c.get(i);
ArrayList<Pair<Integer, Double>> neighList = g.topology.get(i);
for(int j = 0; j < neighList.size(); j++){
Pair<Integer, Double> p = neighList.get(j);
int dest = p.first;
int destCom = n2c.get(dest);
double w = p.second;
if(srcCom == destCom){ //if i and dest are in the same community
n2cIn.set(i, n2cIn.get(i) + w);
in.set(srcCom, in.get(srcCom) + w); //update in value of this community
}
tot.set(srcCom, tot.get(srcCom) + w); //update the tot value of community C(i)
}
}
}
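	/**
	 * Build the community-level graph cg from the current partition: each community becomes one
	 * node whose inK/totK hold its internal and total weighted degree, whose size counts its
	 * member nodes, and whose neighbors map holds the edge weights towards adjacent communities.
	 */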
public void initComGraph(){
cg = new ComGraph();
for(int i = 0; i < g.topology.size(); i++){
int srcCom = n2c.get(i);
if(!cg.topology.containsKey(srcCom))
cg.topology.put(srcCom, new ComNode());
ComNode srcNode = cg.topology.get(srcCom);
srcNode.size++;
ArrayList<Pair<Integer, Double>> neighborList = g.topology.get(i);
for(int j = 0; j < neighborList.size(); j++){
Pair<Integer, Double> p = neighborList.get(j);
int destCom = n2c.get(p.first);
double w = p.second;
if(srcCom == destCom){ //if i and j are in the same community
srcNode.inK += w;
}
srcNode.totK += w;
if(!srcNode.neighbors.containsKey(destCom)){
srcNode.neighbors.put(destCom, w);
}
else{
srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + w);
}
cg.totalWeight += w;
}
}
cg.nbNodes = cg.topology.size();
}
public ArrayList<Data> readAllData(String incPath, int from) throws Exception{
ArrayList<Data> dataList = new ArrayList();
while(true){
String incFilePath = FileUtil.extendFileName(incPath, "_" + from);
File incFile = new File(incFilePath);
if(!incFile.exists())
break;
			dataList.addAll(FileUtil.readData(incFilePath));
			from++; //move on to the next incremental file; without this the loop would never terminate
}
return dataList;
}
//read the next batch of data, put them into a change graph represented by deltaG
public int readNextBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start, int periodType) throws Exception{
int end = start;
long startTime = dataList.get(start).timestamp;
long endTime = startTime + 1; //update by Second
if(periodType == FileUtil.BY_MINUTE)
endTime = startTime + 60;
else if(periodType == FileUtil.BY_HOUR)
endTime = startTime + 3600;
else if(periodType == FileUtil.BY_DAY)
endTime = startTime + 24 * 3600;
else if(periodType == FileUtil.BY_WEEK)
endTime = startTime + 7 * 24 * 3600;
else if(periodType == FileUtil.BY_MONTH)
endTime = DateUtil.nextMonth(startTime);
else if(periodType == FileUtil.BY_TWO_MONTH)
endTime = DateUtil.nextKMonth(startTime, 2);
else if(periodType == FileUtil.BY_YEAR)
endTime = startTime + 365 * 24 * 3600;
//parse the data
for(end = start; end < dataList.size(); end++){
Data data = dataList.get(end);
if(data.timestamp >= endTime)
break;
if(!g.nodeDict.containsKey(data.from))
g.nodeDict.put(data.from, g.nodeDict.size());
if(!g.nodeDict.containsKey(data.to))
g.nodeDict.put(data.to, g.nodeDict.size());
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
continue;
}
deltaG.put(link, 1.0);
}
return end;
}
/**
* Read the incremental data of several months
* @param deltaG
* @param dataList
* @param start
* @param periodMonth
* @return
* @throws Exception
*/
public int readBatch(TreeMap<Link, Double> deltaG, ArrayList<Data> dataList, int start, int periodMonth) throws Exception{
int end = start;
long startTime = dataList.get(start).timestamp;
long endTime = DateUtil.nextKMonth(startTime, periodMonth);
//parse the data
for(end = start; end < dataList.size(); end++){
Data data = dataList.get(end);
if(data.timestamp >= endTime)
break;
if(!g.nodeDict.containsKey(data.from))
g.nodeDict.put(data.from, g.nodeDict.size());
if(!g.nodeDict.containsKey(data.to))
g.nodeDict.put(data.to, g.nodeDict.size());
int src = g.nodeDict.get(data.from);
int dest = g.nodeDict.get(data.to);
Link link = new Link(src, dest);
if(src < g.nbNodes && dest < g.nbNodes && g.linkMap.containsKey(link)){
continue;
}
deltaG.put(link, 1.0);
}
if(end == dataList.size() && dataList.get(end-1).timestamp < endTime) //if the final batch of data is incomplete
end = -1;
return end;
}
/**
* update the community structure according to the change of the graph
* @param deltaG - the change of graph
* @throws Exception
*/
public void updateCommunityStructure(TreeMap<Link, Double> deltaG) throws Exception{
//Firstly extend the capacity of the Graph and Community
int oldNbNodes = g.nbNodes; // oldNbNodes is used to identify the newly added nodes
while(size < g.nodeDict.size()){
cg.nbNodes++;
ComNode comNode = new ComNode();
comNode.size = 1;
cg.topology.put(size, comNode);
neighWeight.add(-1.0);
neighPos.add(-1);
n2c.add(n2c.size());
n2cIn.add(0.0);
in.add(0.0);
tot.add(0.0);
g.topology.add(new ArrayList<Pair<Integer, Double>>());
g.nbNodes++;
size++;
}
//read the change part of the graph from deltaG
// we put links into an array because we will use them again
Link links[] = (Link []) deltaG.keySet().toArray(new Link[deltaG.size()]);
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
//update the graph topology
g.linkMap.put(new Link(link.src, link.dest), w);
g.topology.get(link.src).add(new Pair(link.dest, w));
g.nbLinks++;
g.totalWeight += w;
if(link.src != link.dest){
g.topology.get(link.dest).add(new Pair(link.src, w));
g.nbLinks++;
g.totalWeight += w;
}
}
// initialize the community structure by putting every new node into a singleton community
TreeSet<Integer> nodeToUpdate = new TreeSet();
for(int i = 0; i < links.length; i++){
Link link = links[i];
double w = deltaG.get(link);
int srcCom = n2c.get(link.src);
int destCom = n2c.get(link.dest);
ComNode srcNode = cg.topology.get(srcCom);
ComNode destNode = cg.topology.get(destCom);
if(srcCom == destCom){
in.set(srcCom, in.get(srcCom) + 2*w);
srcNode.inK += 2*w;
n2cIn.set(link.src, n2cIn.get(link.src) + w);
n2cIn.set(link.dest, n2cIn.get(link.dest) + w);
}
tot.set(srcCom, tot.get(srcCom) + w);
tot.set(destCom, tot.get(destCom) + 1*w);
srcNode.totK += w;
destNode.totK += w;
			if(srcNode.neighbors.containsKey(destCom)){
				srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + w);
				destNode.neighbors.put(srcCom, destNode.neighbors.get(srcCom) + w);
			}
			else{
				srcNode.neighbors.put(destCom, w);
				destNode.neighbors.put(srcCom, w);
			}
cg.totalWeight += 2*w;
nodeToUpdate.add(link.src);
nodeToUpdate.add(link.dest);
}
		//collect the affected nodes in deltaG whose SVM classification is positive; they are the candidates to be moved
ArrayList<Integer> nodeList = new ArrayList();
Iterator<Integer> it = nodeToUpdate.iterator();
while(it.hasNext()){
int node = it.next();
SvmSample sample = getNodeSample(node);
if(sample.type == SampleType.POSITIVE)
nodeList.add(node);
}
//System.out.println("Move node: " + nodeList.size() + "/" + nodeToUpdate.size());
while(nodeList.size() > 0){
//System.out.println("Node to move: " + nodeList.size());
HashSet<Integer> nextSet = refine(nodeList); //the core step
nodeList.clear();
nodeList.addAll(nextSet);
}
//after the nodes are moved, we next move the communities
HashMap<Integer, ArrayList<Integer>> c2n = getCommunityToNode();
ArrayList<Integer> comList = new ArrayList();
it = c2n.keySet().iterator();
comList.addAll(c2n.keySet());
//System.out.println("Move com: " + comList.size() + "/" + c2n.size());
while(comList.size() > 0){
//System.out.println("Com to move: " + comList.size());
HashSet<Integer> nextSet = refineCom(c2n, comList);
comList.clear();
comList.addAll(nextSet);
}
}
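	/**
	 * Local moving step: for each node in nodeList, move it to the neighbor community with the
	 * largest modularity gain, updating both the node-level structures and the community graph cg.
	 * Returns the set of neighboring nodes whose SVM classification is positive and which should
	 * therefore be re-examined in the next pass.
	 */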
public HashSet<Integer> refine(ArrayList<Integer> nodeList){
ArrayList<Integer> orderList = Utility.randomOrderList(nodeList.size());
HashSet<Integer> updateSet = new HashSet();
int nbMoves = 0;
//move node from its current community to the one which gives the maximum gain in modularity
for(int nodeTmp = 0; nodeTmp < nodeList.size(); nodeTmp++){
int node = nodeList.get(nodeTmp);
int nodeComm = n2c.get(node);
double wDegree = g.weightedDegree(node);
neighComm(node);
remove(node, nodeComm, neighWeight.get(nodeComm));
int bestComm = nodeComm;
double bestNbLinks = 0;
double bestIncrease = 0;
for(int i = 0; i < neighLast; i++){
double increase = modularityGain(node, neighPos.get(i), neighWeight.get(neighPos.get(i)), wDegree);
if(increase > bestIncrease){
bestComm = neighPos.get(i);
bestNbLinks = neighWeight.get(bestComm);
bestIncrease = increase;
}
}
//before insert node into bestComm, we should update the cg
if(bestComm != nodeComm){
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
//System.out.println(neighWeight.get(neighCom));
if(neighCom != nodeComm){ //first move node out of nodeComm
cg.increaseWeight(nodeComm, neighCom, -1 * neighWeight.get(neighCom));
}
}
ComNode comNode = cg.topology.get(nodeComm);
comNode.inK -= 2 * neighWeight.get(nodeComm);
if(comNode.neighbors.containsKey(nodeComm))
comNode.neighbors.put(nodeComm, comNode.neighbors.get(nodeComm) - 2 * neighWeight.get(nodeComm));
comNode.totK -= wDegree;
comNode.size--;
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != bestComm){ // secondly move node into bestComm
cg.increaseWeight(bestComm, neighCom, neighWeight.get(neighCom));
}
}
ComNode bestNode = cg.topology.get(bestComm);
bestNode.inK += 2 * neighWeight.get(bestComm);
if(bestNode.neighbors.containsKey(bestComm))
bestNode.neighbors.put(bestComm, bestNode.neighbors.get(bestComm) + 2 * neighWeight.get(bestComm));
else
bestNode.neighbors.put(bestComm, 2 * neighWeight.get(bestComm));
bestNode.totK += wDegree;
bestNode.size++;
//If the community is empty, remove it.
if(comNode.totK == 0){
cg.removeEmptyComm(nodeComm);
//System.out.println("Community removed!");
}
}
insert(node, bestComm, bestNbLinks);
n2cIn.set(node, bestNbLinks);
if(bestComm != nodeComm){
nbMoves++;
ArrayList<Pair<Integer, Double>> neighbors = g.topology.get(node);
for(int i = 0; i < neighbors.size(); i++){
Pair<Integer, Double> p = neighbors.get(i);
int neigh = p.first;
double w = p.second;
int neighCom = n2c.get(neigh);
if(neighCom == nodeComm)
n2cIn.set(neigh, n2cIn.get(neigh) - w);
else if(neighCom == bestComm)
n2cIn.set(neigh, n2cIn.get(neigh) + w);
SvmSample sample = getNodeSample(neigh);
if(sample.type == SampleType.POSITIVE)
updateSet.add(neigh);
}
}
}
//System.out.println("nbMoves: " + nbMoves);
return updateSet;
}
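	/**
	 * Community merging step: each community in comList whose size does not exceed MAX_MERGE_SIZE
	 * is merged into the neighbor community maximizing neighConn - totK * neighTotK / totalWeight
	 * (when that gain is positive). Returns the set of merge targets that may need further merging.
	 */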
public HashSet<Integer> refineCom(HashMap<Integer, ArrayList<Integer>> c2n, ArrayList<Integer> comList){
HashSet<Integer> updateSet = new HashSet();
for(int comTmp = 0; comTmp < comList.size(); comTmp++){
int com = comList.get(comTmp);
ComNode node = cg.topology.get(com);
if(node.size > MAX_MERGE_SIZE){
continue;
}
double bestIncrease = 0;
int bestCom = com;
double bestConn = 0;
Iterator<Map.Entry<Integer, Double>> it = node.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int neighCom = entry.getKey();
if(neighCom == com)
continue;
double neighConn = entry.getValue();
ComNode neighNode = cg.topology.get(neighCom);
// if(neighNode.size > g.nbNodes/10)
// continue;
double increase = neighConn - node.totK * neighNode.totK / cg.totalWeight;
if(increase > bestIncrease){
bestIncrease = increase;
bestCom = neighCom;
bestConn = neighConn;
}
}
if(bestCom != com){
cg.mergeComm(com, bestCom, bestConn);
mergeCommunity(c2n, com, bestCom, bestConn);
if(cg.topology.get(bestCom).size <= MAX_MERGE_SIZE)
updateSet.add(bestCom); //put this updated community into update Set
updateSet.remove(com);
//System.out.println("Inc: " + Parameter.df.format(bestIncrease) + "Com merge: " + com + " -> " + bestCom);
}
}
return updateSet;
}
public double modularity(){
double q = 0;
double m2 = (double)g.totalWeight;
for(int i = 0; i < size; i++){
if(tot.get(i) > 0){
q += in.get(i)/m2 - Math.pow(tot.get(i).doubleValue()/m2, 2);
}
}
return q;
}
public int nonEmptyCommunities(){
TreeSet<Integer> comSet = new TreeSet();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
comSet.add(com);
}
return comSet.size();
}
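	//returns a quantity proportional to the modularity gain of inserting the node into comm;
	//constant terms are dropped because only the community that maximizes the gain matters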
public double modularityGain(int node, int comm, double dnodecomm, double wDegree){
double totc = tot.get(comm).doubleValue();
double degc = wDegree;
double m2 = g.totalWeight;
double dnc = dnodecomm;
return (dnc - totc*degc/m2);
}
public void remove(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) - g.weightedDegree(node));
in.set(comm, in.get(comm) - 2*dnodecomm - g.nbSelfLoops(node));
n2c.set(node, -1);
}
public void insert(int node, int comm, double dnodecomm){
tot.set(comm, tot.get(comm) + g.weightedDegree(node));
in.set(comm, in.get(comm) + 2*dnodecomm + g.nbSelfLoops(node));
n2c.set(node, comm);
}
	//move node from its current community to destCom
public void move(int node, int destCom){
neighComm(node);
int srcCom = n2c.get(node);
double wDegree = g.weightedDegree(node);
remove(node, n2c.get(node), neighWeight.get(srcCom));
if(srcCom != destCom){
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != srcCom){ //first move node out of nodeComm
cg.increaseWeight(srcCom, neighCom, -1 * neighWeight.get(neighCom));
}
}
ComNode comNode = cg.topology.get(srcCom);
comNode.inK -= 2 * neighWeight.get(srcCom);
if(comNode.neighbors.containsKey(srcCom))
comNode.neighbors.put(srcCom, comNode.neighbors.get(srcCom) - 2 * neighWeight.get(srcCom));
comNode.totK -= wDegree;
comNode.size--;
for(int i = 0; i < neighLast; i++){
int neighCom = neighPos.get(i);
if(neighCom != destCom){ // secondly move node into bestComm
cg.increaseWeight(destCom, neighCom, neighWeight.get(neighCom));
}
}
ComNode bestNode = cg.topology.get(destCom);
bestNode.inK += 2 * neighWeight.get(destCom);
if(bestNode.neighbors.containsKey(destCom))
bestNode.neighbors.put(destCom, bestNode.neighbors.get(destCom) + 2 * neighWeight.get(destCom));
else
bestNode.neighbors.put(destCom, 2 * neighWeight.get(destCom));
bestNode.totK += wDegree;
bestNode.size++;
//If the community is empty, remove it.
if(comNode.totK == 0){
cg.removeEmptyComm(srcCom);
//System.out.println("Community removed!");
}
}
insert(node, destCom, neighWeight.get(destCom));
n2cIn.set(node, neighWeight.get(destCom));
//update n2cIn
ArrayList<Pair<Integer, Double>> neighbors = g.topology.get(node);
for(int i = 0; i < neighbors.size(); i++){
Pair<Integer, Double> p = neighbors.get(i);
int neigh = p.first;
double w = p.second;
int neighCom = n2c.get(neigh);
if(neighCom == srcCom)
n2cIn.set(neigh, n2cIn.get(neigh) - w);
else if(neighCom == destCom)
n2cIn.set(neigh, n2cIn.get(neigh) + w);
}
}
//create a new singleton community for the node
public int insertSingleton(int node){
double k = g.weightedDegree(node);
int commId = 0; //find a usable community id
while(tot.get(commId) > 0)
commId++;
tot.set(commId, k);
in.set(commId, 0.0);
n2c.set(node, commId);
return commId;
}
    // compute the neighboring communities of a node: neighPos lists those communities
    // (position 0 is the node's own community) and neighWeight holds the total edge weight
    // from the node to each of them; this updates neighWeight, neighPos and neighLast
public void neighComm(int node){
for(int i = 0; i < neighLast; i++)
neighWeight.set(neighPos.get(i), -1.0);
neighLast = 0;
neighConnMax = 0;
ArrayList<Pair<Integer, Double>> neighList = g.topology.get(node);
int deg = g.nbNeighbors(node);
//System.out.println("node: " + node + " n2c: " + n2c.get(node));
neighPos.set(0, n2c.get(node));
neighWeight.set(neighPos.get(0), 0.0);
neighLast = 1;
for(int i = 0; i < deg; i++){
int neigh = neighList.get(i).first;
int neighComm = n2c.get(neigh);
double neighW = neighList.get(i).second;
if(neigh != node){
if(neighWeight.get(neighComm).intValue() == -1){
neighWeight.set(neighComm, 0.0);
neighPos.set(neighLast++, neighComm);
}
neighWeight.set(neighComm, neighWeight.get(neighComm) + neighW);
if(neighComm != neighPos.get(0) && neighWeight.get(neighComm) > neighConnMax)
neighConnMax = neighWeight.get(neighComm);
}
}
}
public HashMap<Integer, ArrayList<Integer>> getCommunityToNode(){
HashMap<Integer, ArrayList<Integer>> c2n = new HashMap();
for(int i = 0; i < g.nbNodes; i++){
int com = n2c.get(i);
if(!c2n.containsKey(com))
c2n.put(com, new ArrayList());
c2n.get(com).add(i);
}
return c2n;
}
//merge the community from srcCom to destCom
public void mergeCommunity(HashMap<Integer, ArrayList<Integer>> c2n, int srcCom, int destCom, double conn){
ArrayList<Integer> sList = c2n.get(srcCom);
ArrayList<Integer> dList = c2n.get(destCom);
        //first update n2cIn: each link between a moved node and the destination community becomes intra-community
for(int i = 0; i < sList.size(); i++){
int node = sList.get(i);
int com = n2c.get(node);
ArrayList<Pair<Integer, Double>> neighbors = g.topology.get(node);
for(int j = 0; j < neighbors.size(); j++){
Pair<Integer, Double> p = neighbors.get(j);
int neigh = p.first;
double w = p.second;
int neighCom = n2c.get(neigh);
if(neighCom == destCom){
n2cIn.set(node, n2cIn.get(node) + w);
n2cIn.set(neigh, n2cIn.get(neigh) + w);
}
}
}
for(int i = 0; i < sList.size(); i++){
n2c.set(sList.get(i), destCom);
dList.add(sList.get(i));
}
in.set(destCom, in.get(destCom) + in.get(srcCom) + 2*conn);
tot.set(destCom, tot.get(destCom) + tot.get(srcCom));
in.set(srcCom, 0.0);
tot.set(srcCom, 0.0);
sList.clear();
}
    //build the feature sample of a node and classify it with the trained SVM model
public SvmSample getNodeSample(int node){
double k = g.weightedDegree(node);
double kIn = n2cIn.get(node);
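        //feature vector: bias term, weighted degree k, intra-community degree kIn,
        //passed through toLogValue(1) before classification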
Sample sample = new Sample(new double[]{1, k, kIn}, SampleType.NEGATIVE);
sample.toLogValue(1);
SvmSample s = ClassifierUtil.parseSvmSample(sample);
double v = svm.svm_predict(model, s.x);
if(v > 0)
s.type = SampleType.POSITIVE;
return s;
}
public void outputCommunityStatistics(){
int comNum = 0, maxSize=0, minSize=1000000;
float avgSize = 0;
HashMap<Integer, Integer> sizeMap = new HashMap();
ArrayList<Integer> sizeList = new ArrayList();
ArrayList<Float> modList = new ArrayList();
ArrayList<Float> inList = new ArrayList();
ArrayList<Float> totList = new ArrayList();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!sizeMap.containsKey(com))
sizeMap.put(com, 0);
sizeMap.put(com, sizeMap.get(com) + 1);
}
Iterator<Integer> it = sizeMap.keySet().iterator();
double m2 = g.totalWeight;
while(it.hasNext()){
int com = it.next();
int size = sizeMap.get(com);
double mod = in.get(com)/m2 - Math.pow(tot.get(com).doubleValue()/m2, 2);
if(size > maxSize)
maxSize = size;
if(size < minSize)
minSize = size;
sizeList.add(size);
modList.add((float)(mod * m2));
inList.add((float)in.get(com).doubleValue());
totList.add((float)tot.get(com).doubleValue());
}
//sort the results by community size
int tmp1;
float tmp2;
for(int i = 0; i < sizeList.size()-1; i++){
for(int j = i+1; j < sizeList.size(); j++){
if(sizeList.get(i) > sizeList.get(j) || (sizeList.get(i) == sizeList.get(j) && totList.get(i) > totList.get(j))){
Utility.listSwap(sizeList, i, j);
Utility.listSwap(modList, i, j);
Utility.listSwap(inList, i, j);
Utility.listSwap(totList, i, j);
}
}
}
        int com8 = 0, com5 = 0; //the number of largest communities that together contain 80% and 50% of the nodes
int totalSize = 0;
for(int i = sizeList.size()-1; i>=0; i--){
totalSize += sizeList.get(i);
if((double)totalSize / g.nbNodes < 0.8)
com8++;
if((double) totalSize / g.nbNodes < 0.5)
com5++;
}
comNum = sizeMap.size();
        avgSize = (float) g.nbNodes / comNum;
System.out.println("Modularity: " + (float)modularity() + " M2: " + g.totalWeight);
System.out.println("#Communities: " + comNum + " Average Size: " + avgSize + " Max Size: " + maxSize + " Min Size: " + minSize);
System.out.println("#Communities for 50% nodes: " + com5 + " #Communities for 80% nodes: " + com8);
// System.out.println("size=" + sizeList + ";");
// System.out.println("Qc=" + modList + ";");
// System.out.println("in=" + inList + ";");
// System.out.println("tot=" + totList + ";");
}
public void writeCommunity(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
HashMap<Integer, ArrayList<Integer>> comToNode = new HashMap();
for(int i = 0; i < n2c.size(); i++){
int com = n2c.get(i);
if(!comToNode.containsKey(com))
comToNode.put(com, new ArrayList());
comToNode.get(com).add(i);
}
//write community
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<Integer> it = comToNode.keySet().iterator();
while(it.hasNext()){
int com = it.next();
ArrayList<Integer> nodeList = comToNode.get(com);
bw.write(revDict.get(nodeList.get(0)));
for(int i = 1; i < nodeList.size(); i++){
bw.write("\t" + revDict.get(nodeList.get(i)));
}
bw.write("\r\n");
}
bw.close();
}
public void writeGraph(String outPath) throws Exception{
HashMap<Integer, String> revDict = Utility.reverseDict(g.nodeDict);
TreeSet<LabelEdge> edgeSet = new TreeSet();
Iterator<Link> it = g.linkMap.keySet().iterator();
while(it.hasNext()){
Link link = it.next();
String from = revDict.get(link.src);
String to = revDict.get(link.dest);
LabelEdge edge = new LabelEdge(from, to);
edgeSet.add(edge);
}
//write graph
BufferedWriter bw = new BufferedWriter(new FileWriter(outPath));
Iterator<LabelEdge> it1 = edgeSet.iterator();
while(it1.hasNext()){
LabelEdge edge = it1.next();
bw.write(edge.src + "\t" + edge.dest + "\t1\r\n");
}
bw.close();
}
/**
*
* @author shangjiaxing
*
*/
public class Graph{
HashMap<String, Integer> nodeDict; //mapping the node label (String) to node id (Integer)
public int nbNodes; //number of nodes
public int nbLinks; //number of edges;
public double totalWeight; //sum of the weight of the links*2 (each link is calculated twice)
ArrayList<ArrayList<Pair<Integer, Double>>> topology; //The matrix of the graph, the neighbors of i is denoted as topology.get(i)
TreeMap<Link, Double> linkMap;
public Graph(){
nbNodes = 0;
nbLinks = 0;
totalWeight = 0;
topology = new ArrayList();
}
public Graph(String graphPath) throws Exception{
nodeDict = FileUtil.getDict(graphPath);
nbNodes = nodeDict.size();
topology = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(graphPath));
topology.ensureCapacity(nbNodes);
this.linkMap = new TreeMap();
for(int i = 0; i < nbNodes; i++)
topology.add(new ArrayList());
nbLinks = 0;
totalWeight = 0;
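            //each line: sourceLabel <tab> destLabel <tab> weight; every edge with src != dest is
            //stored in both adjacency lists and counted twice, so totalWeight accumulates to 2m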
            String str = br.readLine();
            while(str != null && !str.trim().equals("")){
                StringTokenizer token = new StringTokenizer(str.trim(), "\t");
                int src = nodeDict.get(token.nextToken());
                int dest = nodeDict.get(token.nextToken());
                double weight = Double.parseDouble(token.nextToken());
linkMap.put(new Link(src, dest), weight);
topology.get(src).add(new Pair(dest, weight));
nbLinks++;
totalWeight += weight; //to support weighted network
if(src != dest){
topology.get(dest).add(new Pair(src, weight));
nbLinks++;
totalWeight += weight;
}
str = br.readLine();
}
br.close();
}
// public double weightedDegree(int node){
// double wDegree = 0;
// ArrayList<Pair<Integer, Double>> neighList = topology.get(node);
// for(int i = 0; i < neighList.size(); i++){
// wDegree += neighList.get(i).second;
// }
// return wDegree;
// }
public double weightedDegree(int node){
return (double)g.topology.get(node).size();
}
public int nbNeighbors(int node){
return topology.get(node).size();
}
public double nbSelfLoops(int node){
ArrayList<Pair<Integer, Double>> neighList = topology.get(node);
for(int i = 0; i < neighList.size(); i++){
Pair<Integer, Double> p = neighList.get(i);
if(node == p.first.intValue())
return p.second;
}
return 0;
}
public int getNbNodes(){
return nbNodes;
}
public int getNbLinks(){
return nbLinks;
}
public ArrayList<ArrayList<Pair<Integer, Double>>> getTopology(){
return topology;
}
public double getTotalWeight(){
return totalWeight;
}
}
/**
* The community graph, where nodes represent communities and edges represent
* the connections among communities
* @author shangjiaxing
*
*/
public class ComGraph{
public int nbNodes;
public double totalWeight;
public HashMap<Integer, ComNode> topology;
public ComGraph(){
topology = new HashMap();
}
public double modularity(){
double q = 0;
double m2 = totalWeight;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
ComNode node = entry.getValue();
if(node.totK > 0)
q += node.inK / m2 - Math.pow(node.totK/m2, 2);
}
return q;
}
public double modularity2(){
double q = 0;
double m2 = totalWeight;
double total = 0;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
int com = entry.getKey();
double in = 0, tot = 0;
ComNode node = entry.getValue();
Iterator<Map.Entry<Integer, Double>> subIt = node.neighbors.entrySet().iterator();
while(subIt.hasNext()){
Map.Entry<Integer, Double> subEntry = subIt.next();
int destCom = subEntry.getKey();
double w = subEntry.getValue();
if(com == destCom)
in += w;
tot += w;
total += w;
}
if(node.totK > 0)
q += in / m2 - Math.pow(tot/m2, 2);
}
//System.out.println("m2: " + m2 + " Total: " + total);
return q;
}
public void removeEmptyComm(int comId){
ComNode node = topology.get(comId);
Iterator<Map.Entry<Integer, Double>> it = node.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int destCom = entry.getKey();
if(destCom != comId){
topology.get(destCom).neighbors.remove(comId);
}
}
topology.remove(comId);
nbNodes--;
}
//merge the src community to the dest community
public void mergeComm(int srcCom, int destCom, double conn){
ComNode srcNode = topology.get(srcCom);
ComNode destNode = topology.get(destCom);
Iterator<Map.Entry<Integer, Double>> it = srcNode.neighbors.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, Double> entry = it.next();
int neighCom = entry.getKey();
double neighW = entry.getValue();
if(neighCom != destCom && neighCom != srcCom){
increaseWeight(neighCom, destCom, neighW);
}
}
if(destNode.neighbors.containsKey(destCom))
destNode.neighbors.put(destCom, destNode.neighbors.get(destCom) + srcNode.inK + 2 * conn);
else destNode.neighbors.put(destCom, srcNode.inK + 2*conn);
destNode.inK += srcNode.inK + 2 * conn;
destNode.totK += srcNode.totK;
destNode.size += srcNode.size;
removeEmptyComm(srcCom);
}
public void increaseWeight(int srcCom, int destCom, double deltaW){
ComNode srcNode = topology.get(srcCom);
ComNode destNode = topology.get(destCom);
if(!srcNode.neighbors.containsKey(destCom)){
srcNode.neighbors.put(destCom, 0.0);
destNode.neighbors.put(srcCom, 0.0);
}
srcNode.neighbors.put(destCom, srcNode.neighbors.get(destCom) + deltaW);
destNode.neighbors.put(srcCom, destNode.neighbors.get(srcCom) + deltaW);
}
public double getM2(){
double m2 = 0;
Iterator<Map.Entry<Integer, ComNode>> it = topology.entrySet().iterator();
while(it.hasNext()){
Map.Entry<Integer, ComNode> entry = it.next();
ComNode node = entry.getValue();
Iterator<Map.Entry<Integer, Double>> subIt = node.neighbors.entrySet().iterator();
while(subIt.hasNext()){
Map.Entry<Integer, Double> subEntry = subIt.next();
double w = subEntry.getValue();
m2 += w;
}
}
return m2;
}
}
/**
* The community node
* @author shangjiaxing
*
*/
class ComNode{
double totK; //the total degree of the community
double inK; //the inner degree, i.e., self-loops of the community
double size; //the number of nodes in the community
HashMap<Integer, Double> neighbors; //the neighbor communities, where the key is the community ID and the value is the connections among the two communities
public ComNode(){
totK = 0;
inK = 0;
size = 0;
neighbors = new HashMap();
}
}
/**
*
* @param <T1>
* @param <T2>
*/
class Pair<T1, T2>{
public T1 first;
public T2 second;
public Pair(T1 first, T2 second){
this.first = first;
this.second = second;
}
}
class Link implements Comparable{
int src;
int dest;
public Link(int src, int dest){
if(src < dest){
this.src = src;
this.dest = dest;
}
else{
this.src = dest;
this.dest = src;
}
}
public int compareTo(Object o){
Link e = (Link)o;
if(src < e.src){
return -1;
}
else if(src > e.src){
return 1;
}
else{
if(dest < e.dest)
return -1;
else if(dest > e.dest)
return 1;
else
return 0;
}
}
}
}
| 45,423 | 34.130704 | 159 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/Runnable/runALL.java | package org.dzhuang.dynamic.Runnable;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import org.dzhuang.dynamic.DynaMo.Arrays2;
import org.dzhuang.dynamic.DynaMo.Clustering;
import org.dzhuang.dynamic.DynaMo.Network;
import org.dzhuang.dynamic.DynaMo.VOSClusteringTechnique;
import org.dzhuang.dynamic.OtherAlgorithms.BatchInc;
import org.dzhuang.dynamic.OtherAlgorithms.GreMod;
import org.dzhuang.dynamic.OtherAlgorithms.LearnIncLr;
import org.dzhuang.dynamic.OtherAlgorithms.LearnIncSvm;
import org.dzhuang.dynamic.OtherAlgorithms.QCA;
import org.dzhuang.dynamic.util.FileUtil;
import org.dzhuang.dynamic.util.Parameter;
import it.unimi.dsi.fastutil.ints.Int2BooleanOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import toolbox.lr.LogisticRegression;
import toolbox.svm.SVM;
public class runALL {
public static double resolution_default=1.0;
public static int nRandomStarts_default=1;
public static int nIterations_default_Louvain=10000;
public static int nIterations_default_DynaMo=10;
public static long randomSeed_default=0;
public static void main(String args[]) throws Exception{
for(int i=1;i<=1000;i++) {
runLouvain("Cit-HepTh", 25, i);
runDynamicModularity("Cit-HepTh", 25, i);
runEXP("Cit-HepTh", i);
runLBTR("Cit-HepTh", 25, i);
runLouvain("Cit-HepPh", 31, i);
runDynamicModularity("Cit-HepPh", 31, i);
runEXP("Cit-HepPh", i);
runLBTR("Cit-HepPh", 31, i);
runLouvain("dblp_coauthorship", 31, i);
runDynamicModularity("dblp_coauthorship", 31, i);
runEXP("dblp_coauthorship", i);
runLBTR("dblp_coauthorship", 31, i);
runLouvain("facebook", 28, i);
runDynamicModularity("facebook", 28, i);
runEXP("facebook", i);
runLBTR("facebook", 28, i);
runLouvain("flickr", 24, i);
runDynamicModularity("flickr", 24, i);
runEXP("flickr", i);
runLBTR("flickr", 24, i);
runLouvain("youtube", 33, i);
runDynamicModularity("youtube", 33, i);
runEXP("youtube", i);
runLBTR("youtube", 33, i);
}
}
public static void runDynamicModularity(String dataSet, int nbatch, int itrn) throws IOException, ClassNotFoundException{
String DyNet="data/"+dataSet+"/ntwk2/";
String intNet="data/"+dataSet+"/inct/";
/**********************************************************************************/
// First time call Louvain
Network oldNetwork = Network.load(DyNet+"1");
double resolution2 = resolution_default / (2 * oldNetwork.totalEdgeWeight + oldNetwork.totalEdgeWeightSelfLinks);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
HashMap<String, Double> alpha2=new HashMap<String, Double>();
System.out.println("1 running");
double[] beta=null;
for (int i=0;i<nRandomStarts_default;i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(oldNetwork, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default_DynaMo) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
System.out.println("1 done");
writeOutputFile("data/"+dataSet+"/runDynamicModularity_"+dataSet+"_com_1", clustering);
VOSClusteringTechnique VOSClusteringTechnique_temporary = new VOSClusteringTechnique(oldNetwork, clustering, resolution2);
double modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
if(modularity_temporary>maxModularity)
maxModularity=modularity_temporary;
alpha2=VOSClusteringTechnique_temporary.alpha2;
beta=VOSClusteringTechnique_temporary.beta;
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runDynamicModularity_"+itrn);
/**********************************************************************************/
// DynaMo
for(int ibatch=2;ibatch<=nbatch;ibatch++){
System.out.println(ibatch+" running");
/**********************************************************************************/
// DynaMo-Initialization
/**********************************************************************************/
// newly formed small clusters
Int2IntOpenHashMap clusterInitialization2=new Int2IntOpenHashMap();
int clusterInitializationCNT=0;
            // the set of clusters whose vertices will all be re-initialized as singleton clusters
IntOpenHashSet clusterSet2=new IntOpenHashSet();
Int2IntOpenHashMap nodeAdded_adjacentNode2=new Int2IntOpenHashMap();
Int2DoubleOpenHashMap nodeAdded2_edgeWeight2=new Int2DoubleOpenHashMap();
Int2BooleanOpenHashMap nodeAdded2_flag2=new Int2BooleanOpenHashMap();
/**********************************************************************************/
BufferedReader bufferedReader = new BufferedReader(new FileReader(intNet+ibatch));
String line="";
long t1=System.currentTimeMillis();
while ((line=bufferedReader.readLine())!=null){
String[] lines=line.split("\t");
String FLAG=lines[1];
int startNode=Integer.parseInt(lines[2]);
int endNode=Integer.parseInt(lines[3]);
double wt_new=0.0;
                if(FLAG.equals("+")) //edge addition: use the weight field (index 4) if present, otherwise 1
                    wt_new=(lines.length > 4) ? Double.parseDouble(lines[4]) : 1;
                else if(FLAG.equals("-")) //edge deletion: negative weight delta
                    wt_new=(lines.length > 4) ? -Double.parseDouble(lines[4]) : -1;
else
wt_new=Double.parseDouble(lines[5]);
double wt=wt_new;
// newly added vertices
if(startNode>=oldNetwork.nNodes){ // both startNode and endNode are new
if(!nodeAdded_adjacentNode2.containsKey(startNode)){
nodeAdded_adjacentNode2.put(startNode, endNode);
nodeAdded2_edgeWeight2.put(startNode, wt);
nodeAdded2_flag2.put(startNode, true);
}
else if(nodeAdded2_edgeWeight2.get(startNode)<wt){
nodeAdded_adjacentNode2.replace(startNode, endNode);
nodeAdded2_edgeWeight2.replace(startNode, wt);
nodeAdded2_flag2.replace(startNode, true);
}
else if(nodeAdded2_edgeWeight2.get(startNode)==wt)
nodeAdded2_flag2.replace(startNode, false);
if(!nodeAdded_adjacentNode2.containsKey(endNode)){
nodeAdded_adjacentNode2.put(endNode, startNode);
nodeAdded2_edgeWeight2.put(endNode, wt);
nodeAdded2_flag2.put(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)<wt){
nodeAdded_adjacentNode2.replace(endNode, startNode);
nodeAdded2_edgeWeight2.replace(endNode, wt);
nodeAdded2_flag2.replace(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)==wt)
nodeAdded2_flag2.replace(endNode, false);
}
else if(endNode>=oldNetwork.nNodes){ // only endNode is new
if(!nodeAdded_adjacentNode2.containsKey(endNode)){
nodeAdded_adjacentNode2.put(endNode, startNode);
nodeAdded2_edgeWeight2.put(endNode, wt);
nodeAdded2_flag2.put(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)<wt){
nodeAdded_adjacentNode2.replace(endNode, startNode);
nodeAdded2_edgeWeight2.replace(endNode, wt);
nodeAdded2_flag2.replace(endNode, true);
}
else if(nodeAdded2_edgeWeight2.get(endNode)==wt)
nodeAdded2_flag2.replace(endNode, false);
clusterSet2.add(clustering.getCluster(startNode));
}
// old vertices
else{ // both startNode and endNode are old
int cN1=clustering.getCluster(startNode);
int cN2=clustering.getCluster(endNode);
// edge addition or edge weight increase
if(wt>0.0){
if(cN1==cN2){ // intra-community
clusterSet2.add(cN1);
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
else{ // cross-community
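                            //cross-community edge: both communities are marked for re-initialization
                            //only if the added weight exceeds (sqrt(delta1^2 + 4*delta2) - delta1)/2,
                            //computed from the two communities' internal (alpha) and total (beta) weights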
double m=oldNetwork.totalEdgeWeight;
double acN1=0;
if(alpha2.containsKey(cN1+"_"+cN1)){
acN1=alpha2.get(cN1+"_"+cN1);
}
double acN2=0;
if(alpha2.containsKey(cN2+"_"+cN2)){
acN2=alpha2.get(cN2+"_"+cN2);
}
double bcN1=beta[cN1];
double bcN2=beta[cN2];
double acNk=0;
if(cN1<=cN2)
if(alpha2.containsKey(cN1+"_"+cN2))
acNk=acN1+acN2+2*alpha2.get(cN1+"_"+cN2);
else
acNk=acN1+acN2;
else
if(alpha2.containsKey(cN2+"_"+cN1))
acNk=acN1+acN2+2*alpha2.get(cN2+"_"+cN1);
else
acNk=acN1+acN2;
double alpha22=acN1+acN2-acNk;
double beta2=bcN1+bcN2;
double delta1=2*m-alpha22-beta2;
double delta2=m*alpha22+bcN1*bcN2;
double value=(Math.sqrt(Math.pow(delta1, 2)+4*delta2)-delta1)*0.5;
double delta_W=wt;
if(delta_W>value){
clusterSet2.add(cN1);
clusterSet2.add(cN2);
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
}
}
// edge deletion or edge weight decrease
else if(wt<0.0 && cN1==cN2){ // intra-community
clusterSet2.add(cN1);
for(int vt:Arrays.copyOfRange(oldNetwork.neighbor, oldNetwork.firstNeighborIndex[startNode], oldNetwork.firstNeighborIndex[startNode + 1]))
clusterSet2.add(clustering.getCluster(vt));
for(int vt:Arrays.copyOfRange(oldNetwork.neighbor, oldNetwork.firstNeighborIndex[endNode], oldNetwork.firstNeighborIndex[endNode + 1]))
clusterSet2.add(clustering.getCluster(vt));
}
}
}
bufferedReader.close();
long t2=System.currentTimeMillis();
/**********************************************************************************/
Network newNetwork=Network.load(DyNet+ibatch);
/**********************************************************************************/
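            //attach each newly added vertex to the same initial cluster as its most strongly
            //connected neighbor, but only when that heaviest neighbor is unique (flag still true)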
for(Map.Entry<Integer, Integer> entry : nodeAdded_adjacentNode2.int2IntEntrySet()) {
int startNode=(Integer) entry.getKey();
int endNode=(Integer) entry.getValue();
if(nodeAdded2_flag2.get(startNode))
if(!clusterInitialization2.containsKey(startNode) && !clusterInitialization2.containsKey(endNode)){
clusterInitialization2.put(startNode, clusterInitializationCNT);
clusterInitialization2.put(endNode, clusterInitializationCNT);
clusterInitializationCNT++;
}
else if(!clusterInitialization2.containsKey(startNode))
clusterInitialization2.put(startNode, clusterInitialization2.get(endNode));
else if(!clusterInitialization2.containsKey(endNode))
clusterInitialization2.put(endNode, clusterInitialization2.get(startNode));
}
// vertices become singleton communities
IntOpenHashSet singletonNodeSet2=new IntOpenHashSet();
            // from the clusters marked for re-initialization
for(int k=0;k<oldNetwork.nNodes;k++)
if(!clusterInitialization2.containsKey(k) && clusterSet2.contains(clustering.getCluster(k)))
singletonNodeSet2.add(k);
// from newly added vertices
for(int node : nodeAdded_adjacentNode2.keySet())
if(!clusterInitialization2.containsKey(node))
singletonNodeSet2.add(node);
// Re-organize cluster labels
Int2IntOpenHashMap clusterMap2=new Int2IntOpenHashMap ();
// newly initialized set of clusters
Clustering clustering2=new Clustering(newNetwork.nNodes);
int cnt=0;
for(int k=0;k<newNetwork.nNodes;k++)
if(k<oldNetwork.nNodes && !clusterSet2.contains(clustering.cluster[k])){
if(clusterMap2.containsKey(clustering.cluster[k]))
clustering2.cluster[k]=clusterMap2.get(clustering.cluster[k]);
else{
clustering2.cluster[k]=cnt;
clusterMap2.put(clustering.cluster[k], cnt);
cnt++;
}
}
else if(singletonNodeSet2.contains(k)){
clustering2.cluster[k]=cnt;
cnt++;
}
for(Map.Entry<Integer, Integer> entry : clusterInitialization2.int2IntEntrySet())
clustering2.cluster[entry.getKey()]=cnt+entry.getValue();
clustering2.nClusters=Arrays2.calcMaximum(clustering2.cluster) + 1;
/**********************************************************************************/
// The DynaMo Algorithm
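            //rerun Louvain for at most nIterations_default_DynaMo passes, starting from the partial
            //initialization clustering2 (runLouvainAlgorithm2) unless every node is a singleton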
resolution2 = resolution_default / (2 * newNetwork.totalEdgeWeight + newNetwork.totalEdgeWeightSelfLinks);
alpha2=new HashMap<String, Double>();
beta=null;
clustering=null;
double maxModularity2 = Double.NEGATIVE_INFINITY;
random = new Random(randomSeed_default);
long t=0;
for (int i=0;i<nRandomStarts_default;i++){
VOSClusteringTechnique VOSClusteringTechnique2 = new VOSClusteringTechnique(newNetwork, clustering2, resolution2);
int j = 0;
boolean update = true;
long t3=System.currentTimeMillis();
do{
// update = VOSClusteringTechnique2.runLouvainAlgorithm(random);
if (clustering2.nClusters < newNetwork.nNodes)
update = VOSClusteringTechnique2.runLouvainAlgorithm2(random);
else
update = VOSClusteringTechnique2.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default_DynaMo) && update);
long t4=System.currentTimeMillis();
double modularity = VOSClusteringTechnique2.calcQualityFunction();
if (modularity > maxModularity2){
// next old clustering
clustering = VOSClusteringTechnique2.getClustering();
maxModularity2 = modularity;
}
t+=t4-t3;
}
/**********************************************************************************/
writeOutputFile("data/"+dataSet+"/runDynamicModularity_"+dataSet+"_com_"+ibatch, clustering);
VOSClusteringTechnique_temporary = new VOSClusteringTechnique(newNetwork, clustering, resolution2);
modularity_temporary = VOSClusteringTechnique_temporary.calcQualityFunction();
if(modularity_temporary>maxModularity)
maxModularity=modularity_temporary;
alpha2=VOSClusteringTechnique_temporary.alpha2;
beta=VOSClusteringTechnique_temporary.beta;
System.out.println(dataSet+"\t"+"runDynamicModularity"+"\t"+ibatch+"\t"+maxModularity2+"\t"+(t2-t1+t)+"\t"+(t2-t1)+"\t"+t);
pw.println(ibatch+"\t"+maxModularity2+"\t"+(t2-t1+t)+"\t"+(t2-t1)+"\t"+t);
// next old network
oldNetwork=new Network(newNetwork.nNodes, newNetwork.firstNeighborIndex, newNetwork.neighbor, newNetwork.edgeWeight);
}
pw.close();
}
public static void runLouvain(String dataSet, int nbatch, int itrn) throws IOException, ClassNotFoundException{
long t0_1 = System.currentTimeMillis();
String DyNet="data/"+dataSet+"/ntwk2/";
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLouvain_"+itrn);
long t0_2 = System.currentTimeMillis();
for(int ibatch=2;ibatch<=nbatch;ibatch++){
long t1=System.currentTimeMillis();
Network network = Network.load(DyNet+ibatch);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
double resolution2 = resolution_default / (2 * network.totalEdgeWeight + network.totalEdgeWeightSelfLinks);
/***************************************************************************/
for (int i = 0; i < nRandomStarts_default; i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(network, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}
while ((j < nIterations_default_Louvain) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction2();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
/***************************************************************************/
writeOutputFile("data/"+dataSet+"/runLouvain_"+dataSet+"_com_"+(ibatch+1), clustering);
long t2=System.currentTimeMillis();
System.out.println(dataSet+"\t"+"runLouvain"+"\t"+ibatch+"\t"+maxModularity+"\t"+(t2-t1+t0_2-t0_1));
if(ibatch>1)
pw.println(ibatch+"\t"+maxModularity+"\t"+(t2-t1+t0_2-t0_1));
}
pw.close();
}
private static void writeOutputFile(String fileName, Clustering clustering) throws IOException{
BufferedWriter bufferedWriter;
int i, nNodes;
nNodes = clustering.getNNodes();
clustering.orderClustersByNNodes();
bufferedWriter = new BufferedWriter(new FileWriter(fileName));
for (i = 0; i < nNodes; i++){
bufferedWriter.write(Integer.toString(clustering.getCluster(i)));
bufferedWriter.newLine();
}
bufferedWriter.close();
}
public static void runEXP(String dataSet, int itrn) throws Exception {
String graphPath="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String initComPath="data2/"+dataSet+"/"+dataSet+"_com_0.txt";
String incPath="data2/"+dataSet+"/"+dataSet+"_inc.txt";
runQCA(graphPath, initComPath, incPath, dataSet, itrn);
runBatchInc(graphPath, initComPath, incPath, dataSet, itrn);
runGreMod(graphPath, initComPath, incPath, dataSet, itrn);
}
public static void runGreMod(String graphPath, String initComPath, String incPath, String dataSet, int itrn) throws Exception{
long t1_1 = System.currentTimeMillis();
String comOutPath = FileUtil.replaceFileName(incPath, dataSet+"_GreMod_community.txt");
String tmpPath = "graph.tmp";
FileUtil.generateGraph(graphPath, tmpPath);
System.out.println("Running incremental algorithm GreMod...");
System.out.println("Loading initial community structure...");
FileUtil.generateGraph(graphPath, tmpPath);
GreMod greMod = new GreMod();
greMod.initialize(tmpPath, initComPath);
System.out.println("Loaded! Time point: 0: modularity: " + greMod.modularity());
long t1_2 = System.currentTimeMillis();
HashMap resultMap = greMod.increase(incPath, 10000, comOutPath);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>)resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>)resultMap.get("timeList");
System.out.println("Succeed! There are " + modList.size() + " incremental data points. Community files are also generated in the same path!");
System.out.println("Modularity: " + modList);
System.out.println("Run time: " + timeList);
FileUtil.deleteFile(tmpPath);
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_GreMod_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.close();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runGreMod_"+itrn);
long t2_2 = System.currentTimeMillis();
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
public static void runQCA(String graphPath, String initComPath, String incPath, String dataSet, int itrn) throws Exception{
long t1_1 = System.currentTimeMillis();
System.out.println("Running the QCA2 algorithm...");
QCA qca = new QCA();
qca.init(graphPath, initComPath, 0.0001);
double mod = qca.modularity();
System.out.println("Graph read! Nodes: " + qca.g.nbNodes + " Links: " + qca.g.nbLinks/2);
System.out.println("Community read! Communities: " + qca.nonEmptyCommunities() + " Modularity: " + mod + " hInc.cg.mod: ");
String comOutPath = FileUtil.replaceFileName(initComPath, dataSet+"_QCA2_com.txt");
long t1_2 = System.currentTimeMillis();
HashMap resultMap = qca.increase(incPath, 10000, comOutPath);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_QCA2_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
System.out.println("See results in File: " + resultPath);
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runQCA2_"+itrn);
long t2_2 = System.currentTimeMillis();
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
public static void runBatchInc(String graphPath, String initComPath, String incPath, String dataSet, int itrn) throws Exception{
long t1_1 = System.currentTimeMillis();
System.out.println("Running the BatchInc2 algorithm...");
BatchInc batchInc = new BatchInc();
batchInc.initialize(graphPath, initComPath);
String comOutPath = FileUtil.replaceFileName(initComPath, dataSet+"_BatchInc2_com.txt");
long t1_2 = System.currentTimeMillis();
HashMap resultMap = batchInc.increase(incPath, 10000, comOutPath);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_BatchInc2_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
System.out.println("See results in File: " + resultPath);
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runBatch2_"+itrn);
long t2_2 = System.currentTimeMillis();
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
public static void runLBTR(String dataSet, int size, int itrn) throws Exception {
// trainSvmClassifiers(dataSet, size);
runLearnIncSvm(dataSet, itrn);
// trainLrClassifiers(dataSet, size);
runLearnIncLr(dataSet, itrn);
}
public static void trainSvmClassifiers(String dataSet, int size) throws Exception{
for(int i=0;i<size;i++){
BufferedReader br=new BufferedReader(new FileReader("data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt"));
String line="";
int n=0;
int p=0;
while ((line=br.readLine())!=null){
if(line.split("\t")[0].equals("0"))
n++;
else
p++;
}
br.close();
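            //n2p: ratio of negative ("0") to positive samples; passed to the SVM trainer
            //together with a cap of 10000 training samples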
double n2p=(double) n/(double) p;
int maxSize=n+p < 10000 ? n+p : 10000;
String samplePath = "data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt";
String modelPath = "data2/"+dataSet+"/"+dataSet+"_model_SVM_"+i+".txt";
System.out.println("trainSvmClassifiers"+"\t"+dataSet+"\t"+i);
SVM.trainModel(samplePath, modelPath, n2p, maxSize);
}
}
public static void trainLrClassifiers(String dataSet, int size) throws Exception{
for(int i=0;i<size;i++){
BufferedReader br=new BufferedReader(new FileReader("data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt"));
String line="";
int n=0;
int p=0;
while ((line=br.readLine())!=null){
if(line.split("\t")[0].equals("0"))
n++;
else
p++;
}
br.close();
double n2p=(double) n/(double) p;
int maxSize=n+p < 10000 ? n+p : 10000;
int paramNum=3;
double delta = 0.0001;
String samplePath="data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt";
String paramPath="data2/"+dataSet+"/"+dataSet+"_param_LR_"+i+".txt";
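            //LR training pipeline: read samples, rebalance the class ratio, cap the sample count,
            //log-transform the features, fit the model, then normalize the learned parameters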
LogisticRegression lr = new LogisticRegression(paramNum, delta);
lr.readSample(samplePath);
lr.adjustSampleRatio(n2p);
lr.limitSampleNum(maxSize);
lr.logSample();
lr.start();
lr.normalizeParam();
double param[] = lr.getParam().data;
java.text.DecimalFormat df = Parameter.df;
String str = "param=[" + df.format(param[0]) + ", " + df.format(param[1]) + ", " + df.format(param[2]) + "];\r\n";
System.out.println("trainLrClassifiers"+"\t"+dataSet+"\t"+i);
FileUtil.writeString(paramPath, str);
}
}
public static void runLearnIncSvm(String dataSet, int itrn) throws Exception{
// long t1 = System.currentTimeMillis();
long t1_1 = System.currentTimeMillis();
String graphPath="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String comPath="data2/"+dataSet+"/"+dataSet+"_com_0.txt";
String incPath="data2/"+dataSet+"/"+dataSet+"_inc.txt";
LearnIncSvm lInc = new LearnIncSvm();
lInc.init2(graphPath, comPath);
double mod = lInc.modularity();
System.out.println("Graph read! Nodes: " + lInc.g.nbNodes + " Links: " + lInc.g.nbLinks/2);
System.out.println("Community read! Communities: " + lInc.nonEmptyCommunities() + " Modularity: " + mod);
lInc.MAX_MERGE_SIZE=20;
long t1_2 = System.currentTimeMillis();
HashMap resultMap = lInc.increaseNoComOutput(incPath, 10000, dataSet);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = "data2/"+dataSet+"/"+dataSet+"_result_LearnIncSVM.txt";
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
// long t2 = System.currentTimeMillis();
// System.out.println("Time: " + (t2-t1));
long t2_2 = System.currentTimeMillis();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLearnIncSvm_"+itrn);
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
public static void runLearnIncLr(String dataSet, int itrn) throws Exception{
// long t1 = System.currentTimeMillis();
long t1_1 = System.currentTimeMillis();
String graphPath ="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String incPath = "data2/"+dataSet+"/"+dataSet+"_inc.txt";
String initComPath = "data2/"+dataSet+"/"+dataSet+"_com_0.txt";
LearnIncLr lInc = new LearnIncLr();
lInc.init2(graphPath, initComPath);
double mod = lInc.modularity();
System.out.println("Graph read! Nodes: " + lInc.g.nbNodes + " Links: " + lInc.g.nbLinks/2);
System.out.println("Community read! Communities: " + lInc.nonEmptyCommunities() + " Modularity: " + mod);
lInc.MAX_MERGE_SIZE=20;
long t1_2 = System.currentTimeMillis();
HashMap resultMap = lInc.increaseNoComOutput(incPath, 10000, dataSet);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = "data2/"+dataSet+"/"+dataSet+"_result_LearnIncLR.txt";
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
// long t2 = System.currentTimeMillis();
// System.out.println("Time: " + (t2-t1));
long t2_2 = System.currentTimeMillis();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLearnIncLr_"+itrn);
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
}
| 31,559 | 43.388186 | 148 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/Runnable/ParamParser.java | package org.dzhuang.dynamic.Runnable;
import java.util.*;
public class ParamParser {
public static HashMap<String, String> parseParam(String args[]){
HashMap<String, String> paramMap = new HashMap();
for(int i = 0; i < args.length; i++){
if(args[i] != null && args[i].trim().length() > 1){
if(args[i].charAt(0) == '-'){
String paramName = args[i].substring(1, args[i].length());
String paramValue = null;
if(args.length > i+1){
if(args[i+1].charAt(0) != '-')
paramValue = args[i+1];
}
paramMap.put(paramName, paramValue);
}
}
}
return paramMap;
}
public static void printParamMap(HashMap<String, String> paramMap){
Iterator<String> keyIt = paramMap.keySet().iterator();
while(keyIt.hasNext()){
String paramName = keyIt.next();
String paramValue = paramMap.get(paramName);
System.out.println(paramName + ": " + paramValue);
}
}
}
| 915 | 25.171429 | 68 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/Runnable/RunAlgorithm.java | /**
* Run the incremental algorithm
*/
package org.dzhuang.dynamic.Runnable;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import org.dzhuang.dynamic.DynaMo.ModularityOptimizer_DynaMo;
import org.dzhuang.dynamic.DynaMo.ModularityOptimizer_Louvain;
import org.dzhuang.dynamic.OtherAlgorithms.BatchInc;
import org.dzhuang.dynamic.OtherAlgorithms.GreMod;
import org.dzhuang.dynamic.OtherAlgorithms.LearnIncLr;
import org.dzhuang.dynamic.OtherAlgorithms.LearnIncSvm;
import org.dzhuang.dynamic.OtherAlgorithms.QCA;
import org.dzhuang.dynamic.util.FileUtil;
import org.dzhuang.dynamic.util.Parameter;
import toolbox.lr.LogisticRegression;
import toolbox.svm.SVM;
public class RunAlgorithm {
public static void main(String args[]) throws Exception{
/*ModularityOptimizer_Louvain.runLouvain("Cit-HepPh", 31);
ModularityOptimizer_Louvain.runLouvain("Cit-HepTh", 25);
ModularityOptimizer_Louvain.runLouvain("dblp_coauthorship", 31);
ModularityOptimizer_Louvain.runLouvain("facebook", 28);
ModularityOptimizer_Louvain.runLouvain("flickr", 24);
ModularityOptimizer_Louvain.runLouvain("youtube", 33);
ModularityOptimizer_DynaMo.runDynamicModularity("Cit-HepPh", 31);
ModularityOptimizer_DynaMo.runDynamicModularity("Cit-HepTh", 25);
ModularityOptimizer_DynaMo.runDynamicModularity("dblp_coauthorship", 31);
ModularityOptimizer_DynaMo.runDynamicModularity("facebook", 28);
ModularityOptimizer_DynaMo.runDynamicModularity("flickr", 24);
ModularityOptimizer_DynaMo.runDynamicModularity("youtube", 33);*/
runEXP("Cit-HepPh");
runEXP("Cit-HepTh");
runEXP("dblp_coauthorship");
runEXP("facebook");
runEXP("flickr");
runEXP("youtube");
runLBTR("Cit-HepPh", 31);
runLBTR("Cit-HepTh", 25);
runLBTR("dblp_coauthorship", 31);
runLBTR("facebook", 28);
runLBTR("flickr", 24);
runLBTR("youtube", 33);
}
public static void runEXP(String dataSet) throws Exception {
String graphPath="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String initComPath="data2/"+dataSet+"/"+dataSet+"_com_0.txt";
String incPath="data2/"+dataSet+"/"+dataSet+"_inc.txt";
runQCA(graphPath, initComPath, incPath, dataSet);
runBatchInc(graphPath, initComPath, incPath, dataSet);
runGreMod(graphPath, initComPath, incPath, dataSet);
}
public static void runGreMod(String graphPath, String initComPath, String incPath, String dataSet) throws Exception{
String comOutPath = FileUtil.replaceFileName(incPath, dataSet+"_GreMod_community.txt");
String tmpPath = "graph.tmp";
FileUtil.generateGraph(graphPath, tmpPath);
System.out.println("Running incremental algorithm GreMod...");
System.out.println("Loading initial community structure...");
FileUtil.generateGraph(graphPath, tmpPath);
GreMod greMod = new GreMod();
greMod.initialize(tmpPath, initComPath);
System.out.println("Loaded! Time point: 0: modularity: " + greMod.modularity());
HashMap resultMap = greMod.increase(incPath, 10000, comOutPath);
ArrayList<Double> modList = (ArrayList<Double>)resultMap.get("modList");
ArrayList<Double> timeList = (ArrayList<Double>)resultMap.get("timeList");
System.out.println("Succeed! There are " + modList.size() + " incremental data points. Community files are also generated in the same path!");
System.out.println("Modularity: " + modList);
System.out.println("Run time: " + timeList);
FileUtil.deleteFile(tmpPath);
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_GreMod_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.close();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runGreMod");
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+timeList.get(i)*1000);
}
pw.close();
}
public static void runQCA(String graphPath, String initComPath, String incPath, String dataSet) throws Exception{
System.out.println("Running the QCA2 algorithm...");
QCA qca = new QCA();
qca.init(graphPath, initComPath, 0.0001);
double mod = qca.modularity();
System.out.println("Graph read! Nodes: " + qca.g.nbNodes + " Links: " + qca.g.nbLinks/2);
System.out.println("Community read! Communities: " + qca.nonEmptyCommunities() + " Modularity: " + mod + " hInc.cg.mod: ");
String comOutPath = FileUtil.replaceFileName(initComPath, dataSet+"_QCA2_com.txt");
long t1 = System.currentTimeMillis();
HashMap resultMap = qca.increase(incPath, 10000, comOutPath);
long t2 = System.currentTimeMillis();
ArrayList<Float> modList = (ArrayList<Float>) resultMap.get("modList");
ArrayList<Float> timeList = (ArrayList<Float>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_QCA2_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
System.out.println("See results in File: " + resultPath);
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runQCA2");
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+timeList.get(i)*1000);
}
pw.close();
}
public static void runBatchInc(String graphPath, String initComPath, String incPath, String dataSet) throws Exception{
System.out.println("Running the BatchInc2 algorithm...");
BatchInc batchInc = new BatchInc();
batchInc.initialize(graphPath, initComPath);
String comOutPath = FileUtil.replaceFileName(initComPath, dataSet+"_BatchInc2_com.txt");
long t1 = System.currentTimeMillis();
HashMap resultMap = batchInc.increase(incPath, 10000, comOutPath);
long t2 = System.currentTimeMillis();
ArrayList<Float> modList = (ArrayList<Float>) resultMap.get("modList");
ArrayList<Float> timeList = (ArrayList<Float>) resultMap.get("timeList");
ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
System.out.println("Q=" + modList + ";");
System.out.println("T=" + timeList + ";");
System.out.println("C=" + comList + ";");
String resultPath = FileUtil.replaceFileName(initComPath, dataSet+"_BatchInc2_result.txt");
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
bw.write("Q=" + modList.toString() + ";\r\n");
bw.write("T=" + timeList.toString() + ";\r\n");
bw.write("C=" + comList.toString() + ";\r\n");
bw.close();
System.out.println("See results in File: " + resultPath);
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runBatch2");
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+timeList.get(i)*1000);
}
pw.close();
}
public static void runLBTR(String dataSet, int size) throws Exception {
trainSvmClassifiers(dataSet, size);
runLearnIncSvm(dataSet);
trainLrClassifiers(dataSet, size);
runLearnIncLr(dataSet);
}
public static void trainSvmClassifiers(String dataSet, int size) throws Exception{
for(int i=0;i<size;i++){
BufferedReader br=new BufferedReader(new FileReader("data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt"));
String line="";
int n=0;
int p=0;
while ((line=br.readLine())!=null){
if(line.split("\t")[0].equals("0"))
n++;
else
p++;
}
br.close();
double n2p=(double) n/(double) p;
int maxSize=n+p < 10000 ? n+p : 10000;
String samplePath = "data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt";
String modelPath = "data2/"+dataSet+"/"+dataSet+"_model_SVM_"+i+".txt";
System.out.println("trainSvmClassifiers"+"\t"+dataSet+"\t"+i);
SVM.trainModel(samplePath, modelPath, n2p, maxSize);
}
}
public static void trainLrClassifiers(String dataSet, int size) throws Exception{
for(int i=0;i<size;i++){
BufferedReader br=new BufferedReader(new FileReader("data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt"));
String line="";
int n=0;
int p=0;
while ((line=br.readLine())!=null){
if(line.split("\t")[0].equals("0"))
n++;
else
p++;
}
br.close();
double n2p=(double) n/(double) p;
int maxSize=n+p < 10000 ? n+p : 10000;
int paramNum=3;
double delta = 0.0001;
String samplePath="data2/"+dataSet+"/"+dataSet+"_sample_init_"+i+".txt";
String paramPath="data2/"+dataSet+"/"+dataSet+"_param_LR_"+i+".txt";
LogisticRegression lr = new LogisticRegression(paramNum, delta);
lr.readSample(samplePath);
lr.adjustSampleRatio(n2p);
lr.limitSampleNum(maxSize);
lr.logSample();
lr.start();
lr.normalizeParam();
double param[] = lr.getParam().data;
java.text.DecimalFormat df = Parameter.df;
String str = "param=[" + df.format(param[0]) + ", " + df.format(param[1]) + ", " + df.format(param[2]) + "];\r\n";
System.out.println("trainLrClassifiers"+"\t"+dataSet+"\t"+i);
FileUtil.writeString(paramPath, str);
}
}
public static void runLearnIncSvm(String dataSet) throws Exception{
// long t1 = System.currentTimeMillis();
long t1_1 = System.currentTimeMillis();
String graphPath="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String comPath="data2/"+dataSet+"/"+dataSet+"_com_0.txt";
String incPath="data2/"+dataSet+"/"+dataSet+"_inc.txt";
LearnIncSvm lInc = new LearnIncSvm();
lInc.init2(graphPath, comPath);
double mod = lInc.modularity();
System.out.println("Graph read! Nodes: " + lInc.g.nbNodes + " Links: " + lInc.g.nbLinks/2);
System.out.println("Community read! Communities: " + lInc.nonEmptyCommunities() + " Modularity: " + mod);
lInc.MAX_MERGE_SIZE=20;
long t1_2 = System.currentTimeMillis();
HashMap resultMap = lInc.increaseNoComOutput(incPath, 10000, dataSet);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
// ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
// System.out.println("Q=" + modList + ";");
// System.out.println("T=" + timeList + ";");
// System.out.println("C=" + comList + ";");
// String resultPath = "data2/"+dataSet+"/"+dataSet+"_result_LearnIncSVM.txt";
// BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
// bw.write("Q=" + modList.toString() + ";\r\n");
// bw.write("T=" + timeList.toString() + ";\r\n");
// bw.write("C=" + comList.toString() + ";\r\n");
// bw.close();
// long t2 = System.currentTimeMillis();
// System.out.println("Time: " + (t2-t1));
long t2_2 = System.currentTimeMillis();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLearnIncSvm");
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
public static void runLearnIncLr(String dataSet) throws Exception{
// long t1 = System.currentTimeMillis();
long t1_1 = System.currentTimeMillis();
String graphPath ="data2/"+dataSet+"/"+dataSet+"_graph_0.txt";
String incPath = "data2/"+dataSet+"/"+dataSet+"_inc.txt";
String initComPath = "data2/"+dataSet+"/"+dataSet+"_com_0.txt";
LearnIncLr lInc = new LearnIncLr();
lInc.init2(graphPath, initComPath);
double mod = lInc.modularity();
System.out.println("Graph read! Nodes: " + lInc.g.nbNodes + " Links: " + lInc.g.nbLinks/2);
System.out.println("Community read! Communities: " + lInc.nonEmptyCommunities() + " Modularity: " + mod);
lInc.MAX_MERGE_SIZE=20;
long t1_2 = System.currentTimeMillis();
HashMap resultMap = lInc.increaseNoComOutput(incPath, 10000, dataSet);
long t2_1 = System.currentTimeMillis();
ArrayList<Double> modList = (ArrayList<Double>) resultMap.get("modList");
ArrayList<Long> timeList = (ArrayList<Long>) resultMap.get("timeList");
// ArrayList<Integer> comList = (ArrayList<Integer>)resultMap.get("comList");
// System.out.println("Q=" + modList + ";");
// System.out.println("T=" + timeList + ";");
// System.out.println("C=" + comList + ";");
// String resultPath = "data2/"+dataSet+"/"+dataSet+"_result_LearnIncLR.txt";
// BufferedWriter bw = new BufferedWriter(new FileWriter(new File(resultPath)));
// bw.write("Q=" + modList.toString() + ";\r\n");
// bw.write("T=" + timeList.toString() + ";\r\n");
// bw.write("C=" + comList.toString() + ";\r\n");
// bw.close();
// long t2 = System.currentTimeMillis();
// System.out.println("Time: " + (t2-t1));
long t2_2 = System.currentTimeMillis();
PrintWriter pw=new PrintWriter(dataSet+"_modularity_runLearnIncLr");
for(int i=0;i<modList.size();i++){
pw.println(modList.get(i)+"\t"+(timeList.get(i)+t1_2-t1_1+t2_2-t2_1));
}
pw.close();
}
}
| 13,273 | 39.346505 | 144 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/preprocessing/ToComparison.java | package org.dzhuang.dynamic.preprocessing;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import org.dzhuang.dynamic.DynaMo.Clustering;
import org.dzhuang.dynamic.DynaMo.Network;
import org.dzhuang.dynamic.DynaMo.VOSClusteringTechnique;
import org.dzhuang.dynamic.OtherAlgorithms.SampleGenerator3;
import org.dzhuang.dynamic.util.FileUtil;
public class ToComparison {
public static double resolution_default=1.0;
public static int nRandomStarts_default=1000;
public static int nIterations_default=10000;
public static long randomSeed_default=0;
public static double ratio=0.8;
public static void main(String[] args) throws Exception{
// trans2Comparisons("Cit-HepPh", 31);
// trans2Comparisons("Cit-HepTh", 25);
// trans2Comparisons("dblp_coauthorship", 31);
// trans2Comparisons("facebook", 28);
// trans2Comparisons("flickr", 24);
// trans2Comparisons("youtube", 33);
trans2DyPerm("Cit-HepPh", 31);
trans2DyPerm("Cit-HepTh", 25);
trans2DyPerm("dblp_coauthorship", 31);
trans2DyPerm("facebook", 28);
// trans2Comparisons("flickr", 24);
// trans2Comparisons("youtube", 33);
}
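	/**
	 * Converts a data set to the DyPerm input layout: copies each snapshot edge
	 * list into data_DyPerm/<data>/ntwk/ and regroups the per-node Louvain
	 * community assignments into tab-separated ground-truth community files.
	 */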
public static void trans2DyPerm(String data, int timeLength) throws ClassNotFoundException, IOException{
// FileUtil.deleteDir(new File("data_DyPerm"));
(new File("data_DyPerm")).mkdirs();
FileUtil.deleteDir(new File("data_DyPerm/"+data));
(new File("data_DyPerm/"+data)).mkdirs();
(new File("data_DyPerm/"+data+"/ntwk")).mkdirs();
(new File("data_DyPerm/"+data+"/GroundTruth")).mkdirs();
for(int i=1;i<=timeLength;i++) {
PrintWriter pw=new PrintWriter("data_DyPerm/"+data+"/ntwk/"+i);
BufferedReader bufferedReader = new BufferedReader(new FileReader("data/"+data+"/ntwk/"+i));
String line="";
while ((line=bufferedReader.readLine()) != null) {
pw.println(line);
}
bufferedReader.close();
pw.close();
HashMap<String, HashSet<Integer>> clusteringSet=new HashMap<String, HashSet<Integer>>();
int cnt=0;
bufferedReader = new BufferedReader(new FileReader("data/"+data+"/runLouvain_"+data+"_com_"+(i+1)));
while ((line=bufferedReader.readLine()) != null) {
if(!clusteringSet.containsKey(line)){
HashSet<Integer> tmp=new HashSet<Integer>();
tmp.add(cnt);
clusteringSet.put(line, tmp);
}
else{
HashSet<Integer> tmp=clusteringSet.get(line);
tmp.add(cnt);
}
cnt++;
}
bufferedReader.close();
pw=new PrintWriter("data_DyPerm/"+data+"/GroundTruth/"+data+"_com_"+i+".txt");
for(Map.Entry<String, HashSet<Integer>> ii: clusteringSet.entrySet()){
HashSet<Integer> tmp=ii.getValue();
int cntt=0;
for(int j:tmp){
if(cntt==0)
pw.print(j);
else
pw.print("\t"+j);
cntt++;
}
pw.println();
}
pw.close();
}
}
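	/**
	 * Converts a data set to the layout used by the comparison algorithms:
	 * writes the weighted initial graph, splits every increment file into
	 * added ("+") and removed ("-") edge files, runs Louvain on the first
	 * snapshot to obtain the initial communities, and generates edge samples
	 * with SampleGenerator3 for each time step.
	 */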
public static void trans2Comparisons(String data, int timeLength) throws Exception {
FileUtil.deleteDir(new File("data2/"+data));
(new File("data2/"+data)).mkdirs();
PrintWriter pw=new PrintWriter("data2/"+data+"/"+data+"_graph_0.txt");
BufferedReader bufferedReader = new BufferedReader(new FileReader("data/"+data+"/ntwk/1"));
String line="";
while ((line=bufferedReader.readLine()) != null) {
String[] lines=line.split("\t");
if(lines.length>2)
pw.println(line);
else
pw.println(line+"\t"+1);
}
bufferedReader.close();
pw.close();
for(int i=2;i<=timeLength;i++){
bufferedReader = new BufferedReader(new FileReader("data/"+data+"/inct/"+i+""));
line="";
while ((line=bufferedReader.readLine()) != null) {
String[] lines=line.split("\t");
String FLAG=lines[1];
int startNode=Integer.parseInt(lines[2]);
int endNode=Integer.parseInt(lines[3]);
if(FLAG.equals("+")){
pw=new PrintWriter(new FileOutputStream(new File("data2/"+data+"/"+data+"_inc_"+(i-1)+".txt"), true));
pw.println(startNode+"\t"+endNode+"\t"+i);
pw.close();
}
else if(FLAG.equals("-")){
pw=new PrintWriter(new FileOutputStream(new File("data2/"+data+"/"+data+"_dec_"+(i-1)+".txt"), true));
pw.println(startNode+"\t"+endNode+"\t"+i);
pw.close();
}
}
bufferedReader.close();
}
runLouvain(data, "data/"+data+"/ntwk/1");
/************************************************************************/
String initDataPath = "data2/"+data+"/"+data+"_inc.tmp";
FileUtil.deleteFile(initDataPath);
File initFile = new File(initDataPath);
initFile.createNewFile();
String incDataPath = "data2/"+data+"/"+data+"_graph_0.txt";
FileUtil.append(initDataPath, incDataPath);
String samplePath = "data2/"+data+"/"+data+"_sample_init_"+0+".txt";
SampleGenerator3 generator2 = new SampleGenerator3();
generator2.generateSample(initDataPath, ratio, samplePath);
for(int i=1;i<timeLength;i++){
incDataPath = "data2/"+data+"/"+data+"_inc_"+i+".txt";
FileUtil.append(initDataPath, incDataPath);
samplePath = "data2/"+data+"/"+data+"_sample_init_"+i+".txt";
SampleGenerator3 generator = new SampleGenerator3();
generator.generateSample(initDataPath, ratio, samplePath);
}
FileUtil.deleteFile(initDataPath);
}
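	/**
	 * Runs the Louvain algorithm with many random restarts, keeps the
	 * clustering with the highest modularity, and writes one tab-separated
	 * line of node ids per community to data2/<data>/<data>_com_0.txt.
	 */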
public static void runLouvain(String data, String net) throws IOException, ClassNotFoundException{
Network network = readInputFile(net);
Clustering clustering = null;
double maxModularity = Double.NEGATIVE_INFINITY;
Random random = new Random(randomSeed_default);
double resolution2 = resolution_default / (2 * network.totalEdgeWeight + network.totalEdgeWeightSelfLinks);
for (int i = 0; i < nRandomStarts_default; i++){
VOSClusteringTechnique VOSClusteringTechnique = new VOSClusteringTechnique(network, resolution2);
int j = 0;
boolean update = true;
do{
update = VOSClusteringTechnique.runLouvainAlgorithm(random);
j++;
}while ((j < nIterations_default) && update);
double modularity = VOSClusteringTechnique.calcQualityFunction2();
if (modularity > maxModularity){
clustering = VOSClusteringTechnique.getClustering();
maxModularity = modularity;
}
}
HashMap<Integer, HashSet<Integer>> clusteringSet=new HashMap<Integer, HashSet<Integer>>();
for(int i=0;i<network.getNNodes();i++){
if(!clusteringSet.containsKey(clustering.getCluster(i))){
HashSet<Integer> tmp=new HashSet<Integer>();
tmp.add(i);
clusteringSet.put(clustering.getCluster(i), tmp);
}
else{
HashSet<Integer> tmp=clusteringSet.get(clustering.getCluster(i));
tmp.add(i);
}
}
PrintWriter pw=new PrintWriter("data2/"+data+"/"+data+"_com_0.txt");
for(Map.Entry<Integer, HashSet<Integer>>i: clusteringSet.entrySet()){
HashSet<Integer> tmp=i.getValue();
int cnt=0;
for(int j:tmp){
if(cnt==0)
pw.print(j);
else
pw.print("\t"+j);
cnt++;
}
pw.println();
}
pw.close();
}
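	/**
	 * Reads a tab-separated edge list (optional third column is the edge
	 * weight, default 1) into the symmetric adjacency arrays expected by Network.
	 */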
private static Network readInputFile(String fileName) throws IOException{
ArrayList<Double> edgeWeight1_List=new ArrayList<Double>();
ArrayList<Integer> node1_List=new ArrayList<Integer>();
ArrayList<Integer> node2_List=new ArrayList<Integer>();
BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName));
String line="";
int maxNode=-1;
while ((line=bufferedReader.readLine())!=null){
String[] lines=line.split("\t");
int startNode=Integer.parseInt(lines[0]);
int endNode=Integer.parseInt(lines[1]);
double wt_new=(lines.length > 2) ? Double.parseDouble(lines[2]) : 1;
node1_List.add(startNode);
node2_List.add(endNode);
edgeWeight1_List.add(wt_new);
if (endNode > maxNode)
maxNode = endNode;
}
bufferedReader.close();
int nNodes = maxNode + 1;
int[] nNeighbors = new int[nNodes];
for (int i = 0; i < node1_List.size(); i++)
if (node1_List.get(i) < node2_List.get(i)){
nNeighbors[node1_List.get(i)]++;
nNeighbors[node2_List.get(i)]++;
}
int[] firstNeighborIndex = new int[nNodes + 1];
int nEdges = 0;
for (int i = 0; i < nNodes; i++){
firstNeighborIndex[i] = nEdges;
nEdges += nNeighbors[i];
}
firstNeighborIndex[nNodes] = nEdges;
int[] neighbor = new int[nEdges];
double[] edgeWeight2 = new double[nEdges];
Arrays.fill(nNeighbors, 0);
for (int i = 0; i < node1_List.size(); i++)
if (node1_List.get(i) < node2_List.get(i)){
int j = firstNeighborIndex[node1_List.get(i)] + nNeighbors[node1_List.get(i)];
neighbor[j] = node2_List.get(i);
edgeWeight2[j] = edgeWeight1_List.get(i);
nNeighbors[node1_List.get(i)]++;
j = firstNeighborIndex[node2_List.get(i)] + nNeighbors[node2_List.get(i)];
neighbor[j] = node1_List.get(i);
edgeWeight2[j] = edgeWeight1_List.get(i);
nNeighbors[node2_List.get(i)]++;
}
Network network = new Network(nNodes, firstNeighborIndex, neighbor, edgeWeight2);
return network;
}
}
| 9,931 | 35.921933 | 115 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/org/dzhuang/dynamic/preprocessing/toDynaMo.java | package org.dzhuang.dynamic.preprocessing;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import org.dzhuang.dynamic.DynaMo.Network;
import org.dzhuang.dynamic.util.FileUtil;
public class toDynaMo {
public static void main(String[] args) throws IOException, ClassNotFoundException {
// run("Cit-HepPh", 31);
// run("Cit-HepTh", 25);
// run("dblp_coauthorship", 31);
// run("facebook", 28);
// run("flickr", 24);
// run("youtube", 33);
// run2("networks_us-101", 3991);
// run2("networks_i-80", 3991);
// run2("networks_lankershim", 3991);
// run2("networks_peachtree", 3991);
// for(int i=3990;i>=0;i--) {
// File f1 = new File("/home/durham314/eclipse-workspace/dynamic/data/networks_peachtree/ntwk/"+i);
// File f2 = new File("/home/durham314/eclipse-workspace/dynamic/data/networks_peachtree/ntwk/"+(i+1));
// f1.renameTo(f2);
// }
}
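	/**
	 * Variant of run() for snapshots numbered from 0: serializes every snapshot
	 * with Network.save and diffs consecutive snapshots into inct files,
	 * marking removed edges with "-" and added edges with "+".
	 */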
public static void run2(String dataSet, int size) throws IOException{
FileUtil.deleteDir(new File("data/"+dataSet+"/inct"));
(new File("data/"+dataSet+"/inct")).mkdir();
FileUtil.deleteDir(new File("data/"+dataSet+"/ntwk2"));
(new File("data/"+dataSet+"/ntwk2")).mkdir();
for(int i=0;i<=size-1;i++) {
Network network = readInputFile("data/"+dataSet+"/ntwk/"+i);
network.save("data/"+dataSet+"/ntwk2/"+(i+1));
}
for(int i=1;i<=size-1;i++){
HashSet<String> oldNetwork=new HashSet<String>();
HashSet<String> newNetwork=new HashSet<String>();
BufferedReader bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i)));
String line="";
while ((line=bufferedReader.readLine()) != null) {
newNetwork.add(line);
}
bufferedReader.close();
int cnt=0;
PrintWriter pw=new PrintWriter("data/"+dataSet+"/inct/"+(i+1));
bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i-1)));
line="";
while ((line=bufferedReader.readLine()) != null) {
oldNetwork.add(line);
if(!newNetwork.contains(line))
pw.println(cnt+"\t"+"-"+"\t"+line);
cnt++;
}
bufferedReader.close();
cnt=0;
bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i)));
line="";
while ((line=bufferedReader.readLine()) != null) {
if(!oldNetwork.contains(line))
pw.println(cnt+"\t"+"+"+"\t"+line);
cnt++;
}
bufferedReader.close();
pw.close();
}
}
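	/**
	 * Serializes every snapshot (numbered from 1) with Network.save and diffs
	 * consecutive snapshots into inct files, marking removed edges with "-"
	 * and added edges with "+".
	 */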
public static void run(String dataSet, int size) throws IOException{
FileUtil.deleteDir(new File("data/"+dataSet+"/inct"));
(new File("data/"+dataSet+"/inct")).mkdir();
FileUtil.deleteDir(new File("data/"+dataSet+"/ntwk2"));
(new File("data/"+dataSet+"/ntwk2")).mkdir();
for(int i=1;i<=size;i++) {
Network network = readInputFile("data/"+dataSet+"/ntwk/"+i);
network.save("data/"+dataSet+"/ntwk2/"+i);
}
for(int i=2;i<=size;i++){
HashSet<String> oldNetwork=new HashSet<String>();
HashSet<String> newNetwork=new HashSet<String>();
BufferedReader bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i)));
String line="";
while ((line=bufferedReader.readLine()) != null) {
newNetwork.add(line);
}
bufferedReader.close();
int cnt=0;
PrintWriter pw=new PrintWriter("data/"+dataSet+"/inct/"+i);
bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i-1)));
line="";
while ((line=bufferedReader.readLine()) != null) {
oldNetwork.add(line);
if(!newNetwork.contains(line))
pw.println(cnt+"\t"+"-"+"\t"+line);
cnt++;
}
bufferedReader.close();
cnt=0;
bufferedReader = new BufferedReader(new FileReader("data/"+dataSet+"/ntwk/"+(i)));
line="";
while ((line=bufferedReader.readLine()) != null) {
if(!oldNetwork.contains(line))
pw.println(cnt+"\t"+"+"+"\t"+line);
cnt++;
}
bufferedReader.close();
pw.close();
}
}
private static Network readInputFile(String fileName) throws IOException{
ArrayList<Double> edgeWeight1_List=new ArrayList<Double>();
ArrayList<Integer> node1_List=new ArrayList<Integer>();
ArrayList<Integer> node2_List=new ArrayList<Integer>();
BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName));
String line="";
int maxNode=-1;
while ((line=bufferedReader.readLine())!=null){
String[] lines=line.split("\t");
int startNode=Integer.parseInt(lines[0]);
int endNode=Integer.parseInt(lines[1]);
double wt_new=(lines.length > 2) ? Double.parseDouble(lines[2]) : 1;
node1_List.add(startNode);
node2_List.add(endNode);
edgeWeight1_List.add(wt_new);
if (endNode > maxNode)
maxNode = endNode;
}
bufferedReader.close();
int nNodes = maxNode + 1;
int[] nNeighbors = new int[nNodes];
for (int i = 0; i < node1_List.size(); i++)
if (node1_List.get(i) < node2_List.get(i)){
nNeighbors[node1_List.get(i)]++;
nNeighbors[node2_List.get(i)]++;
}
int[] firstNeighborIndex = new int[nNodes + 1];
int nEdges = 0;
for (int i = 0; i < nNodes; i++){
firstNeighborIndex[i] = nEdges;
nEdges += nNeighbors[i];
}
firstNeighborIndex[nNodes] = nEdges;
int[] neighbor = new int[nEdges];
double[] edgeWeight2 = new double[nEdges];
Arrays.fill(nNeighbors, 0);
for (int i = 0; i < node1_List.size(); i++)
if (node1_List.get(i) < node2_List.get(i)){
int j = firstNeighborIndex[node1_List.get(i)] + nNeighbors[node1_List.get(i)];
neighbor[j] = node2_List.get(i);
edgeWeight2[j] = edgeWeight1_List.get(i);
nNeighbors[node1_List.get(i)]++;
j = firstNeighborIndex[node2_List.get(i)] + nNeighbors[node2_List.get(i)];
neighbor[j] = node1_List.get(i);
edgeWeight2[j] = edgeWeight1_List.get(i);
nNeighbors[node2_List.get(i)]++;
}
Network network = new Network(nNodes, firstNeighborIndex, neighbor, edgeWeight2);
return network;
}
}
| 6,405 | 33.627027 | 105 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/ClassifierUtil.java | package toolbox;
import java.io.*;
import java.util.*;
import libsvm.*;
import toolbox.lr.*;
import toolbox.svm.*;
import org.dzhuang.dynamic.util.Parameter;
import org.dzhuang.dynamic.util.Utility;
public class ClassifierUtil {
/**
* remove the duplicate samples
* @param sampleList
* @return
*/
public static ArrayList<Sample> uniqueSample(ArrayList<Sample> sampleList){
ArrayList<Sample> oldList = sampleList;
sampleList = new ArrayList();
TreeSet<Sample> sampleSet = new TreeSet();
for(int i = 0; i < oldList.size(); i++){
Sample sample = oldList.get(i);
if(!sampleSet.contains(sample)){
sampleList.add(sample);
sampleSet.add(sample);
}
}
return sampleList;
}
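	/**
	 * Randomly keeps at most 'size' samples, drawn in a shuffled order.
	 * @param sampleList
	 * @param size
	 * @return
	 */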
public static ArrayList<Sample> limitSampleSize(ArrayList<Sample> sampleList, int size){
ArrayList<Sample> oldList = sampleList;
sampleList = new ArrayList();
ArrayList<Integer> randomOrder = Utility.randomOrderList(oldList.size());
double p = (double)size / oldList.size();
for(int i = 0; i < size && i < oldList.size(); i++){
sampleList.add(oldList.get(randomOrder.get(i)));
}
return sampleList;
}
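	/**
	 * Down-samples one class so that #negatives : #positives is roughly n2pRatio;
	 * returns the original list if either class is empty.
	 * @param sampleList
	 * @param n2pRatio
	 * @return
	 */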
public static ArrayList<Sample> adjustSampleRatio(ArrayList<Sample> sampleList, double n2pRatio){
ArrayList<Sample> oldList = sampleList;
sampleList = new ArrayList();
ArrayList<Sample> positiveList = new ArrayList();
ArrayList<Sample> negativeList = new ArrayList();
for(int i = 0; i < oldList.size(); i++){
Sample sample = oldList.get(i);
if(sample.type == SampleType.POSITIVE)
positiveList.add(sample);
else
negativeList.add(sample);
}
if(negativeList.size() == 0 || positiveList.size() == 0)
return oldList;
if (negativeList.size() >= (int) (positiveList.size() * n2pRatio)) { // there are enough negative samples
for (int i = 0; i < positiveList.size(); i++)
sampleList.add(positiveList.get(i));
int negatives = (int) (positiveList.size() * n2pRatio);
ArrayList<Integer> orderList = Utility.randomOrderList(negativeList.size());
for (int i = 0; i < negatives; i++) {
sampleList.add(negativeList.get(orderList.get(i)));
}
} else {
int positives = (int) (negativeList.size() / n2pRatio);
ArrayList<Integer> orderList = Utility.randomOrderList(positiveList.size());
for (int i = 0; i < positives; i++) {
sampleList.add(positiveList.get(orderList.get(i)));
}
for (int i = 0; i < negativeList.size(); i++) {
sampleList.add(negativeList.get(i));
}
}
return sampleList;
}
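	/**
	 * Applies log(x + 1) scaling to the feature values of every sample.
	 * @param sampleList
	 * @return
	 */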
public static ArrayList<Sample> logScaleSample(ArrayList<Sample> sampleList){
for(int i = 0; i < sampleList.size(); i++){
Sample sample = sampleList.get(i);
sample.toLogValue(1);
}
return sampleList;
}
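	/**
	 * Reads tab-separated samples: the first column is the class label, the
	 * remaining columns are feature values.
	 * @param samplePath
	 * @return
	 */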
public static ArrayList<Sample> readSampleList(String samplePath) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(samplePath));
ArrayList<Sample> sampleList = new ArrayList();
String str = br.readLine();
int paramNum = 0;
StringTokenizer token = new StringTokenizer(str, "\t");
while(token.hasMoreTokens()){
token.nextToken();
paramNum++;
}
while(str != null){
token = new StringTokenizer(str, "\t");
double data[] = new double[paramNum];
int type = new Integer(token.nextToken());
data[0] = type;
int i = 1;
while(token.hasMoreTokens()){
data[i++] = new Double(token.nextToken());
}
Sample sample = new Sample(data, type);
sampleList.add(sample);
str = br.readLine();
}
br.close();
return sampleList;
}
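	/**
	 * Writes samples as tab-separated lines: class label followed by the feature values.
	 * @param sampleList
	 * @param outputPath
	 */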
public static void writeSampleList(ArrayList<Sample> sampleList, String outputPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
for(int i = 0; i < sampleList.size(); i++){
Sample sample = sampleList.get(i);
String str = "" + sample.type;
for(int j = 1; j < sample.data.length; j++){
str += "\t" + (float)sample.data[j];
}
str += "\r\n";
bw.write(str);
}
bw.close();
}
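	/**
	 * Writes samples in LIBSVM sparse format: "+1"/"-1" followed by index:value pairs.
	 * @param sampleList
	 * @param outputPath
	 */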
public static void writeLibsvmSampleList(ArrayList<Sample> sampleList, String outputPath) throws Exception{
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
for(int i = 0; i < sampleList.size(); i++){
Sample sample = sampleList.get(i);
String str = "+1";
if(sample.type == SampleType.NEGATIVE)
str = "-1";
for(int j = 1; j < sample.data.length; j++){
str += " " + j + ":" + (float)sample.data[j];
}
str += "\r\n";
bw.write(str);
}
bw.close();
}
/**
* Transform the sample file format to LibSvm format
* @param inputPath
* @param outputPath
* @param doScale
* @throws Exception
*/
public static void toLibsvmFormat(String inputPath, String outputPath, boolean doScale) throws Exception{
BufferedReader br = new BufferedReader(new FileReader(inputPath));
BufferedWriter bw = new BufferedWriter(new FileWriter(outputPath));
String str = br.readLine();
while(str != null){
String outStr = "-1";
StringTokenizer token = new StringTokenizer(str, "\t");
int type = new Integer(token.nextToken());
if(type == SampleType.POSITIVE)
outStr = "+1";
int index = 1;
while(token.hasMoreTokens()){
double value = new Double(token.nextToken());
if(doScale)
value = Math.log(value+1);
outStr += " " + index + ":" + Parameter.df.format(value);
index++;
}
outStr += "\r\n";
bw.write(outStr);
str = br.readLine();
}
br.close();
bw.close();
}
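	/**
	 * Parses a LIBSVM-format line into an svm_node array and a 0/1 sample type.
	 */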
public static SvmSample parseSvmSample(String str){
StringTokenizer token = new StringTokenizer(str," \t\n\r\f:");
int type = atof(token.nextToken()) > 0 ? 1:0;
int m = token.countTokens() / 2;
svm_node[] x = new svm_node[m];
for(int j=0;j<m;j++)
{
x[j] = new svm_node();
x[j].index = atoi(token.nextToken());
x[j].value = atof(token.nextToken());
}
SvmSample sample = new SvmSample(x, type);
return sample;
}
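	/**
	 * Converts a dense Sample (skipping the leading label/bias column) into an
	 * svm_node array.
	 */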
public static SvmSample parseSvmSample(Sample sample){
svm_node[] x = new svm_node[sample.data.length-1];
for(int i = 0; i < x.length; i++){
x[i] = new svm_node();
x[i].index = i+1;
x[i].value = sample.data[i+1];
}
SvmSample svmSample = new SvmSample(x, sample.type);
return svmSample;
}
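	/**
	 * Randomly partitions the samples into 'fold' roughly equal subsets.
	 */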
public static ArrayList<Sample>[] samplePartition(ArrayList<Sample> sampleList, int fold){
ArrayList<Integer> randomOrder = Utility.randomOrderList(sampleList.size());
int subSize = sampleList.size() / fold; //the size of each subset
ArrayList<Sample> listArr[] = new ArrayList [fold];
for(int i = 0; i < fold; i++){
listArr[i] = new ArrayList();
}
int i = 0, j = 0;
while(i < sampleList.size()){
Sample sample = sampleList.get(randomOrder.get(i));
listArr[j].add(sample);
i++;
if(i % subSize == 0 && j < listArr.length-1)
j++;
}
return listArr;
}
public static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
public static int atoi(String s)
{
return Integer.parseInt(s);
}
}
| 6,921 | 28.708155 | 108 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/LogisticRegression.java | /**
* The logistic regression classification model
*/
package toolbox.lr;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.*;
import toolbox.ClassifierUtil;
import org.dzhuang.dynamic.util.Utility;
public class LogisticRegression {
public static void main(String args[]) throws Exception{
//This is an example showing how to use the Logistic Regression
		double d1[] = {1,0}, d2[]={1,3}; //the first element of each sample is the bias term 1
Sample s1 = new Sample(d1, SampleType.NEGATIVE);
Sample s2 = new Sample(d2, SampleType.POSITIVE);
ArrayList<Sample> sampleList = new ArrayList();
sampleList.add(s1);
sampleList.add(s2);
LogisticRegression lr = new LogisticRegression(sampleList, 2, 0.001);
		lr.setSteplen(1);
		lr.setDecay(0.96);
lr.start();
//lr.normalizeParam();
Param param = lr.getParam();
System.out.println("The trained param: " + param);
System.out.println("The predicted value: " + lr.getLogisticValue(s1) + " " + lr.getLogisticValue(s2));
}
	ArrayList<Sample> sampleList;	//training samples
	Param param;	//current parameter vector
	int paramNum;	//number of parameters (including the bias term)
	double delta;	//convergence threshold on the change of the parameter vector
	double stepLen = 1, decay=0.98;	//learning rate and its per-iteration decay
	int iterations = 0;	//number of gradient-ascent iterations performed
public LogisticRegression(int paramNum, double delta){
this.paramNum = paramNum;
this.param = new Param(paramNum, 1);
this.delta = delta;
}
public LogisticRegression(ArrayList<Sample> sampleList, int paramNum, double delta){
this.sampleList = sampleList;
this.paramNum = paramNum;
this.param = new Param(paramNum, 1);
this.delta = delta;
}
/**
* Start the logistic regression model
*/
public void start(){
Param param1 = new Param(paramNum, 1);
do{
param.setParam(param1.data);
iterations++;
Param gradient = new Param(paramNum, 0);
for(int j = 0; j < paramNum; j++){
double sum = 0;
for(int i = 0; i < sampleList.size(); i++){
Sample sample = sampleList.get(i);
double y = 0;
if(sample.type == SampleType.POSITIVE)
y = 1;
double hValue = getLogisticValue(sample);
sum += (y - hValue) * sample.data[j];
}
gradient.data[j] = sum;
param1.data[j] = param.data[j] + stepLen * sum;
}
stepLen *= decay;
}while(VectorUtil.module(VectorUtil.subtract(param.data, param1.data)) > delta);
}
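	/**
	 * Trains a parameter vector on trainList by gradient ascent and evaluates
	 * it on testList; returns {precision, recall, F-score}.
	 */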
public double[] start(ArrayList<Sample> trainList, ArrayList<Sample> testList){
Param para = new Param(paramNum, 1);
Param para1 = new Param(paramNum, 1);
stepLen = 1;
do{
para.setParam(para1.data);
Param gradient = new Param(paramNum, 0);
for(int j = 0; j < paramNum; j++){
double sum = 0;
for(int i = 0; i < trainList.size(); i++){
Sample sample = trainList.get(i);
double y = 0;
if(sample.type == SampleType.POSITIVE)
y = 1;
double hValue = getLogisticValue(sample, para);
sum += (y - hValue) * sample.data[j];
}
gradient.data[j] = sum;
para1.data[j] = para.data[j] + stepLen * sum;
}
stepLen *= decay;
}while(VectorUtil.module(VectorUtil.subtract(para.data, para1.data)) > delta);
para.data = VectorUtil.normalize(para.data);
System.out.println("Param: " + para);
double accuracy[] = validate(testList, para);
return accuracy;
}
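	/**
	 * Evaluates the given parameters on a test set with a 0.5 probability
	 * threshold; returns {precision, recall, F-score}.
	 */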
public static double[] validate(ArrayList<Sample> testList, Param param){
double positives = 0, hits = 0, preNum = 0;
for(int i = 0; i < testList.size(); i++){
int preType = SampleType.NEGATIVE;
Sample sample = testList.get(i);
if(sample.type == SampleType.POSITIVE)
positives++; //real positives
double prob = getLogisticValue(sample, param);
if(prob >= 0.5){
preType = SampleType.POSITIVE;
preNum++; //predicted positives
if(preType == sample.type)
hits++;
}
}
double precision = hits / preNum;
double recall = hits / positives;
double fScore = 2 * precision * recall / (precision + recall);
System.out.println("Predicted positives: " + preNum + " Hits: " + hits + " Real positives: " + positives);
return new double[]{precision, recall, fScore};
}
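	/**
	 * k-fold cross validation: optionally rebalances and truncates each training
	 * fold, then averages precision, recall and F-score over the folds where
	 * these measures are defined.
	 */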
public double[] crossValidation(int fold, double n2pRatio, int maxSize){
//firstly partition the sample set into several subsets
ArrayList<Sample> listArr[] = samplePartition(fold);
double result[] = new double[3];
double num[] = new double[3];
for(int i = 0; i < fold; i++){
ArrayList<Sample> trainList = new ArrayList();
ArrayList<Sample> testList = new ArrayList();
for(int j = 0; j < fold; j++){
if(i == j)
testList.addAll(listArr[j]);
else
trainList.addAll(listArr[j]);
}
if(n2pRatio > 0)
trainList = ClassifierUtil.adjustSampleRatio(trainList, n2pRatio);
if(maxSize > 0)
trainList = ClassifierUtil.limitSampleSize(trainList, maxSize);
System.out.println("Run #" + (i+1) + " Train: " + trainList.size() + " Test: " + testList.size());
double subResult[] = start(trainList, testList);
for(int j = 0; j < 3; j++){
if(!Double.isNaN(subResult[j]) && !Double.isInfinite(subResult[j])){
result[j] += subResult[j];
num[j]++;
}
}
System.out.println("Precision: " + subResult[0] + " Recall: " + subResult[1] + " fScore: " + subResult[2]);
}
for(int i = 0; i < 3; i++){
result[i] = result[i] / num[i];
}
return result;
}
/**
* partition the sample set into several subsets
* @param fold
* @return
*/
public ArrayList<Sample>[] samplePartition(int fold){
ArrayList<Integer> randomOrder = Utility.randomOrderList(sampleList.size());
int subSize = sampleList.size() / fold; //the size of each subset
ArrayList<Sample> listArr[] = new ArrayList [fold];
for(int i = 0; i < fold; i++){
listArr[i] = new ArrayList();
}
int i = 0, j = 0;
while(i < sampleList.size()){
Sample sample = sampleList.get(randomOrder.get(i));
listArr[j].add(sample);
i++;
if(i % subSize == 0 && j < listArr.length-1)
j++;
}
return listArr;
}
	/**
	 * Logistic (sigmoid) value of the sample under the current parameters.
	 * @param sample
	 * @return
	 */
public double getLogisticValue(Sample sample){
double sum = 0;
for(int i = 0; i < sample.data.length; i++){
sum += sample.data[i] * param.data[i];
}
double result = 1 / (1+Math.exp(-1 * sum));
return result;
}
	/**
	 * Logistic (sigmoid) value of the sample under the given parameters.
	 * @param sample
	 * @param param
	 * @return
	 */
public static double getLogisticValue(Sample sample, Param param){
double sum = 0;
for(int i = 0; i < sample.data.length; i++){
sum += sample.data[i] * param.data[i];
}
double result = 1 / (1+Math.exp(-1 * sum));
return result;
}
	/**
	 * Reads tab-separated samples; the first column is the sample type.
	 * @param inputPath
	 */
public void readSample(String inputPath) throws Exception{
//System.out.println("Reading samples from: " + inputPath);
sampleList = new ArrayList();
BufferedReader br = new BufferedReader(new FileReader(inputPath));
String str = br.readLine();
int positives = 0, negatives = 0;
while(str != null){
StringTokenizer token = new StringTokenizer(str, "\t");
int type = new Integer(token.nextToken());
double data[] = new double[paramNum];
data[0] = 1;
int i = 1;
while(token.hasMoreTokens()){
data[i++] = new Double(token.nextToken());
}
Sample sample = new Sample(data, type);
sampleList.add(sample);
if(type == SampleType.POSITIVE)
positives++;
else
negatives++;
str = br.readLine();
}
br.close();
//System.out.println("Samples read! #Positives: " + positives + " #Negatives: " + negatives);
}
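	/**
	 * Applies log(x + 1) scaling to the features of every training sample.
	 */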
public void logSample(){
for(int i = 0; i < sampleList.size(); i++){
Sample sample = sampleList.get(i);
sample.toLogValue(1);
}
}
/**
* Adjust the ratio of positives : negatives
*/
public void adjustSampleRatio(double n2pRatio){
ArrayList<Sample> oldList = sampleList;
sampleList = new ArrayList();
ArrayList<Sample> positiveList = new ArrayList();
ArrayList<Sample> negativeList = new ArrayList();
for(int i = 0; i < oldList.size(); i++){
Sample sample = oldList.get(i);
if(sample.type == SampleType.POSITIVE)
positiveList.add(sample);
else
negativeList.add(sample);
}
System.out.println("Positives: " + positiveList.size() + " Negatives: " + negativeList.size());
if (negativeList.size() >= (int) (positiveList.size() * n2pRatio)) { // there are enough negative samples
for (int i = 0; i < positiveList.size(); i++)
sampleList.add(positiveList.get(i));
int negatives = (int) (positiveList.size() * n2pRatio);
ArrayList<Integer> orderList = Utility.randomOrderList(negativeList.size());
for (int i = 0; i < negatives; i++) {
sampleList.add(negativeList.get(orderList.get(i)));
}
} else {
int positives = (int) (negativeList.size() / n2pRatio);
ArrayList<Integer> orderList = Utility.randomOrderList(positiveList.size());
for (int i = 0; i < positives; i++) {
sampleList.add(positiveList.get(orderList.get(i)));
}
for (int i = 0; i < negativeList.size(); i++) {
sampleList.add(negativeList.get(i));
}
}
}
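	/**
	 * Keeps each sample with probability num / |sampleList|, so roughly 'num'
	 * samples remain.
	 */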
public void limitSampleNum(int num){
if(sampleList.size() <= num)
return;
double p = (double)num / sampleList.size();
ArrayList<Sample> oldList = sampleList;
sampleList = new ArrayList();
for(int i = 0; i < oldList.size(); i++){
if(Math.random() <= p)
sampleList.add(oldList.get(i));
}
}
public void normalizeParam(){
param.data = VectorUtil.normalize(param.data);
}
public ArrayList<Sample> getSampleList() {
return sampleList;
}
public void setSampleList(ArrayList<Sample> sampleList) {
this.sampleList = sampleList;
}
public Param getParam() {
return param;
}
public void setParam(Param param) {
this.param = param;
}
public int getParamNum() {
return paramNum;
}
public void setParamNum(int paramNum) {
this.paramNum = paramNum;
}
public double getDelta() {
return delta;
}
public void setDelta(double delta) {
this.delta = delta;
}
public double getSteplen() {
return stepLen;
}
public void setSteplen(double stepLen) {
this.stepLen = stepLen;
}
public double getDecay() {
return decay;
}
public void setDecay(double decay) {
this.decay = decay;
}
public int getIterations() {
return iterations;
}
public void setIterations(int iterations) {
this.iterations = iterations;
}
}
| 10,133 | 26.840659 | 114 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/SampleScale.java | package toolbox.lr;
public class SampleScale {
public static double logScale(int value){
return Math.log(value);
}
}
| 125 | 11.6 | 42 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/VectorUtil.java | package toolbox.lr;
public class VectorUtil {
	/**
	 * Element-wise sum of two vectors.
	 * @param data1
	 * @param data2
	 * @return
	 */
public static double[] add(double data1[], double data2[]){
double result[] = new double[data1.length];
for(int i = 0; i < data1.length; i++)
result[i] = data1[i] + data2[i];
return result;
}
	/**
	 * Element-wise difference of two vectors (data1 - data2).
	 * @param data1
	 * @param data2
	 * @return
	 */
public static double[] subtract(double data1[], double data2[]){
double result[] = new double[data1.length];
for(int i = 0; i < data1.length; i++)
result[i] = data1[i] - data2[i];
return result;
}
	/**
	 * Inner product of two vectors.
	 * @param data1
	 * @param data2
	 * @return
	 */
public static double innerProduct(double data1[], double data2[]){
double sum = 0;
for(int i = 0; i < data1.length; i++)
sum += data1[i] * data2[i];
return sum;
}
	/**
	 * Outer product of two vectors.
	 * @param data1
	 * @param data2
	 * @return
	 */
public static double[][] outerProduct(double data1[], double data2[]){
double result[][] = new double[data1.length][data2.length];
for(int i = 0; i < data1.length; i++){
for(int j = 0; j < data2.length; j++){
result[i][j] = data1[i] * data2[j];
}
}
return result;
}
	/**
	 * Magnitude (L2 norm) of a vector.
	 * @param data
	 * @return
	 */
public static double module(double data[]){
double sum = 0;
for(int i = 0; i < data.length; i++){
sum += data[i]*data[i];
}
return Math.sqrt(sum);
}
	/**
	 * Multiplies a vector by a scalar.
	 * @param data
	 * @param mul
	 * @return
	 */
public static double[] multiply(double data[], double mul){
double result[] = new double[data.length];
for(int i = 0; i < data.length; i++){
result[i] = data[i] * mul;
}
return result;
}
	/**
	 * Unit vector pointing in the same direction.
	 * @param data
	 * @return
	 */
public static double[] unit(double data[]){
double mod = module(data);
return multiply(data, 1/mod);
}
	/**
	 * Normalizes the vector in place by its largest absolute element.
	 * @param data
	 */
public static double[] normalize(double data[]){
double value = absMax(data);
for(int i = 0; i < data.length; i++){
data[i] = data[i] / value;
}
return data;
}
	/**
	 * Maximum element of the vector.
	 * @param data
	 * @return
	 */
public static double max(double data[]){
double value = data[0];
for(int i = 1; i < data.length; i++){
if(data[i] > value)
value = data[i];
}
return value;
}
public static double absMax(double data[]){
double value = Math.abs(data[0]);
for(int i = 1; i < data.length; i++){
if(Math.abs(data[i]) > value)
value = Math.abs(data[i]);
}
return value;
}
public static double[] avg(double dataArr[][]){
double avgData[] = new double[dataArr[0].length];
for(int i = 0; i < avgData.length; i++)
avgData[i] = 0;
for(int i = 0; i < dataArr.length; i++){
double data[] = dataArr[i];
for(int j = 0; j < data.length; j++){
avgData[j] += data[j];
}
}
for(int i = 0; i < avgData.length; i++)
avgData[i] /= dataArr.length;
return avgData;
}
}
| 2,852 | 18.277027 | 71 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/SampleType.java | package toolbox.lr;
public class SampleType {
public static final int POSITIVE = 1;
public static final int NEGATIVE = 0;
}
| 128 | 15.125 | 38 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/Sample.java | package toolbox.lr;
import org.dzhuang.dynamic.util.Parameter;
public class Sample implements Comparable{
public double data[];
public int type;
public Sample(double data[], int type){
this.data = data;
this.type = type;
}
public String toString(){
if(data.length == 0)
return "[]";
String str = "[" + Parameter.df.format(data[0]);
for(int i = 1; i < data.length; i++){
str += ", " + Parameter.df.format(data[i]);
}
str += "]";
		str += " type: " + ((type == SampleType.POSITIVE) ? "positive" : "negative");
return str;
}
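	/**
	 * Replaces every feature value (index >= 1) with log(value + offset).
	 */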
public void toLogValue(double offset){
for(int i = 1; i < data.length; i++){
data[i] = Math.log(data[i] + offset);
}
}
public boolean equals(Object o){
if(!(o instanceof Sample)){
return false;
}
Sample s = (Sample) o;
boolean equal = true;
for(int i = 0; i < data.length; i++){
if(data[i] != s.data[i]){
equal = false;
break;
}
}
return equal;
}
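	//compareTo only distinguishes equal from unequal samples; unequal samples
	//are ordered randomly, so the resulting ordering is not stable.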
public int compareTo(Object o){
Sample s = (Sample)o;
boolean equal = true;
for(int i = 0; i < data.length; i++){
if(data[i] != s.data[i]){
equal = false;
break;
}
}
if(equal)
return 0;
else if(Math.random() > 0.5)
return 1;
else
return -1;
}
}
| 1,231 | 17.953846 | 74 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/lr/Param.java | package toolbox.lr;
import org.dzhuang.dynamic.util.Parameter;
public class Param {
	public double data[]; //parameter values
public Param(int paramNum, double initValue){
data = new double[paramNum];
for(int i = 0; i < data.length; i++)
data[i] = initValue;
}
public Param(double data[]){
this.data = data;
}
public void setParam(double data[]){
for(int i = 0 ; i < data.length; i++){
this.data[i] = data[i];
}
}
public String toString(){
if(data.length == 0)
return "[]";
String str = "[" + Parameter.df.format(data[0]);
for(int i = 1; i < data.length; i++){
str += ", " + Parameter.df.format(data[i]);
}
str += "]";
return str;
}
}
| 678 | 17.861111 | 50 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/SvmSample.java | package toolbox.svm;
import libsvm.*;
public class SvmSample {
public svm_node[] x;
public int type;
public SvmSample(svm_node x[], int type){
this.x = x;
this.type = type;
}
}
| 192 | 11.0625 | 42 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/SVM.java | package toolbox.svm;
import java.util.*;
import java.io.*;
import toolbox.*;
import toolbox.lr.*;
import toolbox.svm.*;
import libsvm.*;
public class SVM {
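	/**
	 * Trains a linear-kernel SVM on the samples in trainPath (after rebalancing,
	 * truncating and log-scaling them) and saves the model to modelPath.
	 */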
public static void trainModel(String trainPath, String modelPath, double n2pRatio, int maxSize) throws Exception{
ArrayList<Sample> trainList = ClassifierUtil.readSampleList(trainPath);
trainList = ClassifierUtil.adjustSampleRatio(trainList, n2pRatio);
trainList = ClassifierUtil.limitSampleSize(trainList, maxSize);
trainList = ClassifierUtil.logScaleSample(trainList);
ClassifierUtil.writeLibsvmSampleList(trainList, "sample.train");
String args[] = {"-t", "0", "sample.train", modelPath};
svm_train train = new svm_train();
train.run(args);
new File("sample.train").delete();
}
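	/**
	 * Same as above, but returns the trained svm_model instead of writing it to disk.
	 */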
public static svm_model trainModel(String trainPath, double n2pRatio, int maxSize) throws Exception{
ArrayList<Sample> trainList = ClassifierUtil.readSampleList(trainPath);
trainList = ClassifierUtil.adjustSampleRatio(trainList, n2pRatio);
trainList = ClassifierUtil.limitSampleSize(trainList, maxSize);
trainList = ClassifierUtil.logScaleSample(trainList);
ClassifierUtil.writeLibsvmSampleList(trainList, "sample.train");
String args[] = {"-t", "0", "sample.train", "sample.model"};
svm_train train = new svm_train();
train.run(args);
svm_model model = svm.svm_load_model("sample.model");
new File("sample.train").delete();
new File("sample.model").delete();
return model;
}
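	/**
	 * Trains on trainPath and evaluates on testPath; returns {precision, recall, F-score}.
	 */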
public static double [] predict(String trainPath, String testPath, double n2pRatio, int maxSize) throws Exception{
ArrayList<Sample> trainList = ClassifierUtil.readSampleList(trainPath);
trainList = ClassifierUtil.adjustSampleRatio(trainList, n2pRatio);
trainList = ClassifierUtil.limitSampleSize(trainList, maxSize);
trainList = ClassifierUtil.logScaleSample(trainList);
ArrayList<Sample> testList = ClassifierUtil.readSampleList(testPath);
testList = ClassifierUtil.logScaleSample(testList);
return start(trainList, testList);
}
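	/**
	 * k-fold cross validation of the SVM classifier; each training fold is
	 * rebalanced and truncated before training. Returns the averaged
	 * {precision, recall, F-score}.
	 */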
public static double [] crossValidation(String samplePath, int fold, double n2pRatio, int maxSize) throws Exception{
ArrayList<Sample> sampleList = ClassifierUtil.readSampleList(samplePath);
sampleList = ClassifierUtil.logScaleSample(sampleList);
ArrayList<Sample> listArr[] = ClassifierUtil.samplePartition(sampleList, fold);
double result[] = new double[3];
double num[] = new double[3];
for(int i = 0; i < fold; i++){
ArrayList<Sample> trainList = new ArrayList();
ArrayList<Sample> testList = new ArrayList();
for(int j = 0; j < fold; j++){
if(i == j)
testList.addAll(listArr[j]);
else
trainList.addAll(listArr[j]);
}
trainList = ClassifierUtil.adjustSampleRatio(trainList, n2pRatio);
trainList = ClassifierUtil.limitSampleSize(trainList, maxSize);
double subResult[] = start(trainList, testList);
for(int j = 0; j < 3; j++){
if(!Double.isNaN(subResult[j]) && !Double.isInfinite(subResult[j])){
result[j] += subResult[j];
num[j]++;
}
}
System.out.println("Run #" + (i+1) + " Precision: " + subResult[0] + " Recall: " + subResult[1] + " fScore: " + subResult[2]);
}
result = VectorUtil.multiply(result, 1.0/fold);
return result;
}
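	/**
	 * Trains a linear SVM on trainList and evaluates it on testList; returns
	 * {precision, recall, F-score}.
	 */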
public static double[] start(ArrayList<Sample> trainList, ArrayList<Sample> testList) throws Exception{
ClassifierUtil.writeLibsvmSampleList(trainList, "sample.train");
String args[] = {"-t", "0", "sample.train", "sample.model"};
svm_train train = new svm_train();
train.run(args);
svm_model model = svm.svm_load_model("sample.model");
float positives = 0, hits = 0, preNum = 0;
for(int i = 0; i < testList.size(); i++){
SvmSample sample = ClassifierUtil.parseSvmSample(testList.get(i));
if(sample.type == SampleType.POSITIVE)
positives++;
int preType = SampleType.NEGATIVE;
double v = svm.svm_predict(model, sample.x);
if(v > 0){
preType = SampleType.POSITIVE;
preNum++;
if(preType == sample.type)
hits++;
}
}
double precision = hits / preNum;
double recall = hits / positives;
double fScore = 2 * precision * recall / (precision + recall);
new File("sample.train").delete();
new File("sample.model").delete();
System.out.println("Predicted positives: " + preNum + " Hits: " + hits + " Real positives: " + positives);
return new double[]{precision, recall, fScore};
}
}
| 4,396 | 38.972727 | 135 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/svm_toy.java | package toolbox.svm;
import libsvm.*;
import java.applet.*;
import java.awt.*;
import java.util.*;
import java.awt.event.*;
import java.io.*;
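// Interactive demo applet: the user places labelled points with the mouse,
// "Run" trains an SVM with the parameter string from the text field, and the
// applet paints the resulting decision regions (or the regression curve and
// epsilon tube for SVR).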
public class svm_toy extends Applet {
static final String DEFAULT_PARAM="-t 2 -c 100";
int XLEN;
int YLEN;
// off-screen buffer
Image buffer;
Graphics buffer_gc;
// pre-allocated colors
final static Color colors[] =
{
new Color(0,0,0),
new Color(0,120,120),
new Color(120,120,0),
new Color(120,0,120),
new Color(0,200,200),
new Color(200,200,0),
new Color(200,0,200)
};
class point {
point(double x, double y, byte value)
{
this.x = x;
this.y = y;
this.value = value;
}
double x, y;
byte value;
}
Vector<point> point_list = new Vector<point>();
byte current_value = 1;
public void init()
{
setSize(getSize());
final Button button_change = new Button("Change");
Button button_run = new Button("Run");
Button button_clear = new Button("Clear");
Button button_save = new Button("Save");
Button button_load = new Button("Load");
final TextField input_line = new TextField(DEFAULT_PARAM);
BorderLayout layout = new BorderLayout();
this.setLayout(layout);
Panel p = new Panel();
GridBagLayout gridbag = new GridBagLayout();
p.setLayout(gridbag);
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.weightx = 1;
c.gridwidth = 1;
gridbag.setConstraints(button_change,c);
gridbag.setConstraints(button_run,c);
gridbag.setConstraints(button_clear,c);
gridbag.setConstraints(button_save,c);
gridbag.setConstraints(button_load,c);
c.weightx = 5;
c.gridwidth = 5;
gridbag.setConstraints(input_line,c);
button_change.setBackground(colors[current_value]);
p.add(button_change);
p.add(button_run);
p.add(button_clear);
p.add(button_save);
p.add(button_load);
p.add(input_line);
this.add(p,BorderLayout.SOUTH);
button_change.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_change_clicked(); button_change.setBackground(colors[current_value]); }});
button_run.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_run_clicked(input_line.getText()); }});
button_clear.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_clear_clicked(); }});
button_save.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_save_clicked(input_line.getText()); }});
button_load.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_load_clicked(); }});
input_line.addActionListener(new ActionListener()
{ public void actionPerformed (ActionEvent e)
{ button_run_clicked(input_line.getText()); }});
this.enableEvents(AWTEvent.MOUSE_EVENT_MASK);
}
void draw_point(point p)
{
Color c = colors[p.value+3];
Graphics window_gc = getGraphics();
buffer_gc.setColor(c);
buffer_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
window_gc.setColor(c);
window_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
}
void clear_all()
{
point_list.removeAllElements();
if(buffer != null)
{
buffer_gc.setColor(colors[0]);
buffer_gc.fillRect(0,0,XLEN,YLEN);
}
repaint();
}
void draw_all_points()
{
int n = point_list.size();
for(int i=0;i<n;i++)
draw_point(point_list.elementAt(i));
}
void button_change_clicked()
{
++current_value;
if(current_value > 3) current_value = 1;
}
private static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
private static int atoi(String s)
{
return Integer.parseInt(s);
}
void button_run_clicked(String args)
{
// guard
if(point_list.isEmpty()) return;
svm_parameter param = new svm_parameter();
// default values
param.svm_type = svm_parameter.C_SVC;
param.kernel_type = svm_parameter.RBF;
param.degree = 3;
param.gamma = 0;
param.coef0 = 0;
param.nu = 0.5;
param.cache_size = 40;
param.C = 1;
param.eps = 1e-3;
param.p = 0.1;
param.shrinking = 1;
param.probability = 0;
param.nr_weight = 0;
param.weight_label = new int[0];
param.weight = new double[0];
// parse options
StringTokenizer st = new StringTokenizer(args);
String[] argv = new String[st.countTokens()];
for(int i=0;i<argv.length;i++)
argv[i] = st.nextToken();
for(int i=0;i<argv.length;i++)
{
if(argv[i].charAt(0) != '-') break;
if(++i>=argv.length)
{
System.err.print("unknown option\n");
break;
}
switch(argv[i-1].charAt(1))
{
case 's':
param.svm_type = atoi(argv[i]);
break;
case 't':
param.kernel_type = atoi(argv[i]);
break;
case 'd':
param.degree = atoi(argv[i]);
break;
case 'g':
param.gamma = atof(argv[i]);
break;
case 'r':
param.coef0 = atof(argv[i]);
break;
case 'n':
param.nu = atof(argv[i]);
break;
case 'm':
param.cache_size = atof(argv[i]);
break;
case 'c':
param.C = atof(argv[i]);
break;
case 'e':
param.eps = atof(argv[i]);
break;
case 'p':
param.p = atof(argv[i]);
break;
case 'h':
param.shrinking = atoi(argv[i]);
break;
case 'b':
param.probability = atoi(argv[i]);
break;
case 'w':
++param.nr_weight;
{
int[] old = param.weight_label;
param.weight_label = new int[param.nr_weight];
System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
}
{
double[] old = param.weight;
param.weight = new double[param.nr_weight];
System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
}
param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
param.weight[param.nr_weight-1] = atof(argv[i]);
break;
default:
System.err.print("unknown option\n");
}
}
// build problem
svm_problem prob = new svm_problem();
prob.l = point_list.size();
prob.y = new double[prob.l];
if(param.kernel_type == svm_parameter.PRECOMPUTED)
{
}
else if(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
if(param.gamma == 0) param.gamma = 1;
prob.x = new svm_node[prob.l][1];
for(int i=0;i<prob.l;i++)
{
point p = point_list.elementAt(i);
prob.x[i][0] = new svm_node();
prob.x[i][0].index = 1;
prob.x[i][0].value = p.x;
prob.y[i] = p.y;
}
// build model & classify
svm_model model = svm.svm_train(prob, param);
svm_node[] x = new svm_node[1];
x[0] = new svm_node();
x[0].index = 1;
int[] j = new int[XLEN];
Graphics window_gc = getGraphics();
for (int i = 0; i < XLEN; i++)
{
x[0].value = (double) i / XLEN;
j[i] = (int)(YLEN*svm.svm_predict(model, x));
}
buffer_gc.setColor(colors[0]);
buffer_gc.drawLine(0,0,0,YLEN-1);
window_gc.setColor(colors[0]);
window_gc.drawLine(0,0,0,YLEN-1);
int p = (int)(param.p * YLEN);
for(int i=1;i<XLEN;i++)
{
buffer_gc.setColor(colors[0]);
buffer_gc.drawLine(i,0,i,YLEN-1);
window_gc.setColor(colors[0]);
window_gc.drawLine(i,0,i,YLEN-1);
buffer_gc.setColor(colors[5]);
window_gc.setColor(colors[5]);
buffer_gc.drawLine(i-1,j[i-1],i,j[i]);
window_gc.drawLine(i-1,j[i-1],i,j[i]);
if(param.svm_type == svm_parameter.EPSILON_SVR)
{
buffer_gc.setColor(colors[2]);
window_gc.setColor(colors[2]);
buffer_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
window_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
buffer_gc.setColor(colors[2]);
window_gc.setColor(colors[2]);
buffer_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
window_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
}
}
}
else
{
if(param.gamma == 0) param.gamma = 0.5;
prob.x = new svm_node [prob.l][2];
for(int i=0;i<prob.l;i++)
{
point p = point_list.elementAt(i);
prob.x[i][0] = new svm_node();
prob.x[i][0].index = 1;
prob.x[i][0].value = p.x;
prob.x[i][1] = new svm_node();
prob.x[i][1].index = 2;
prob.x[i][1].value = p.y;
prob.y[i] = p.value;
}
// build model & classify
svm_model model = svm.svm_train(prob, param);
svm_node[] x = new svm_node[2];
x[0] = new svm_node();
x[1] = new svm_node();
x[0].index = 1;
x[1].index = 2;
Graphics window_gc = getGraphics();
for (int i = 0; i < XLEN; i++)
for (int j = 0; j < YLEN ; j++) {
x[0].value = (double) i / XLEN;
x[1].value = (double) j / YLEN;
double d = svm.svm_predict(model, x);
if (param.svm_type == svm_parameter.ONE_CLASS && d<0) d=2;
buffer_gc.setColor(colors[(int)d]);
window_gc.setColor(colors[(int)d]);
buffer_gc.drawLine(i,j,i,j);
window_gc.drawLine(i,j,i,j);
}
}
draw_all_points();
}
void button_clear_clicked()
{
clear_all();
}
void button_save_clicked(String args)
{
FileDialog dialog = new FileDialog(new Frame(),"Save",FileDialog.SAVE);
dialog.setVisible(true);
String filename = dialog.getDirectory() + dialog.getFile();
if (filename == null) return;
try {
DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(filename)));
int svm_type = svm_parameter.C_SVC;
int svm_type_idx = args.indexOf("-s ");
if(svm_type_idx != -1)
{
StringTokenizer svm_str_st = new StringTokenizer(args.substring(svm_type_idx+2).trim());
svm_type = atoi(svm_str_st.nextToken());
}
int n = point_list.size();
if(svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
{
for(int i=0;i<n;i++)
{
point p = point_list.elementAt(i);
fp.writeBytes(p.y+" 1:"+p.x+"\n");
}
}
else
{
for(int i=0;i<n;i++)
{
point p = point_list.elementAt(i);
fp.writeBytes(p.value+" 1:"+p.x+" 2:"+p.y+"\n");
}
}
fp.close();
} catch (IOException e) { System.err.print(e); }
}
void button_load_clicked()
{
FileDialog dialog = new FileDialog(new Frame(),"Load",FileDialog.LOAD);
dialog.setVisible(true);
String filename = dialog.getDirectory() + dialog.getFile();
if (filename == null) return;
clear_all();
try {
BufferedReader fp = new BufferedReader(new FileReader(filename));
String line;
while((line = fp.readLine()) != null)
{
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
if(st.countTokens() == 5)
{
byte value = (byte)atoi(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
st.nextToken();
double y = atof(st.nextToken());
point_list.addElement(new point(x,y,value));
}
else if(st.countTokens() == 3)
{
double y = atof(st.nextToken());
st.nextToken();
double x = atof(st.nextToken());
point_list.addElement(new point(x,y,current_value));
}else
break;
}
fp.close();
} catch (IOException e) { System.err.print(e); }
draw_all_points();
}
protected void processMouseEvent(MouseEvent e)
{
if(e.getID() == MouseEvent.MOUSE_PRESSED)
{
if(e.getX() >= XLEN || e.getY() >= YLEN) return;
point p = new point((double)e.getX()/XLEN,
(double)e.getY()/YLEN,
current_value);
point_list.addElement(p);
draw_point(p);
}
}
public void paint(Graphics g)
{
// create buffer first time
if(buffer == null) {
buffer = this.createImage(XLEN,YLEN);
buffer_gc = buffer.getGraphics();
buffer_gc.setColor(colors[0]);
buffer_gc.fillRect(0,0,XLEN,YLEN);
}
g.drawImage(buffer,0,0,this);
}
public Dimension getPreferredSize() { return new Dimension(XLEN,YLEN+50); }
public void setSize(Dimension d) { setSize(d.width,d.height); }
public void setSize(int w,int h) {
super.setSize(w,h);
XLEN = w;
YLEN = h-50;
clear_all();
}
public static void main(String[] argv)
{
new AppletFrame("svm_toy",new svm_toy(),500,500+50);
}
}
class AppletFrame extends Frame {
AppletFrame(String title, Applet applet, int width, int height)
{
super(title);
this.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
applet.init();
applet.setSize(width,height);
applet.start();
this.add(applet);
this.pack();
this.setVisible(true);
}
}
| 12,291 | 23.340594 | 104 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/svm_scale.java | package toolbox.svm;
import libsvm.*;
import java.io.*;
import java.util.*;
import java.text.DecimalFormat;
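// Command-line utility that rescales LIBSVM-format data to a given range
// (optionally scaling the target values as well) and can save or restore the
// scaling parameters.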
public class svm_scale
{
private String line = null;
private double lower = -1.0;
private double upper = 1.0;
private double y_lower;
private double y_upper;
private boolean y_scaling = false;
private double[] feature_max;
private double[] feature_min;
private double y_max = -Double.MAX_VALUE;
private double y_min = Double.MAX_VALUE;
private int max_index;
private long num_nonzeros = 0;
private long new_num_nonzeros = 0;
private static void exit_with_help()
{
System.out.print(
"Usage: svm-scale [options] data_filename\n"
+"options:\n"
+"-l lower : x scaling lower limit (default -1)\n"
+"-u upper : x scaling upper limit (default +1)\n"
+"-y y_lower y_upper : y scaling limits (default: no y scaling)\n"
+"-s save_filename : save scaling parameters to save_filename\n"
+"-r restore_filename : restore scaling parameters from restore_filename\n"
);
System.exit(1);
}
private BufferedReader rewind(BufferedReader fp, String filename) throws IOException
{
fp.close();
return new BufferedReader(new FileReader(filename));
}
private void output_target(double value)
{
if(y_scaling)
{
if(value == y_min)
value = y_lower;
else if(value == y_max)
value = y_upper;
else
value = y_lower + (y_upper-y_lower) *
(value-y_min) / (y_max-y_min);
}
System.out.print(value + " ");
}
private void output(int index, double value)
{
/* skip single-valued attribute */
if(feature_max[index] == feature_min[index])
return;
if(value == feature_min[index])
value = lower;
else if(value == feature_max[index])
value = upper;
else
value = lower + (upper-lower) *
(value-feature_min[index])/
(feature_max[index]-feature_min[index]);
if(value != 0)
{
System.out.print(index + ":" + value + " ");
new_num_nonzeros++;
}
}
private String readline(BufferedReader fp) throws IOException
{
line = fp.readLine();
return line;
}
private void run(String []argv) throws IOException
{
int i,index;
BufferedReader fp = null, fp_restore = null;
String save_filename = null;
String restore_filename = null;
String data_filename = null;
for(i=0;i<argv.length;i++)
{
if (argv[i].charAt(0) != '-') break;
++i;
switch(argv[i-1].charAt(1))
{
case 'l': lower = Double.parseDouble(argv[i]); break;
case 'u': upper = Double.parseDouble(argv[i]); break;
case 'y':
y_lower = Double.parseDouble(argv[i]);
++i;
y_upper = Double.parseDouble(argv[i]);
y_scaling = true;
break;
case 's': save_filename = argv[i]; break;
case 'r': restore_filename = argv[i]; break;
default:
System.err.println("unknown option");
exit_with_help();
}
}
if(!(upper > lower) || (y_scaling && !(y_upper > y_lower)))
{
System.err.println("inconsistent lower/upper specification");
System.exit(1);
}
if(restore_filename != null && save_filename != null)
{
System.err.println("cannot use -r and -s simultaneously");
System.exit(1);
}
if(argv.length != i+1)
exit_with_help();
data_filename = argv[i];
try {
fp = new BufferedReader(new FileReader(data_filename));
} catch (Exception e) {
System.err.println("can't open file " + data_filename);
System.exit(1);
}
/* assumption: min index of attributes is 1 */
/* pass 1: find out max index of attributes */
max_index = 0;
if(restore_filename != null)
{
int idx, c;
try {
fp_restore = new BufferedReader(new FileReader(restore_filename));
}
catch (Exception e) {
System.err.println("can't open file " + restore_filename);
System.exit(1);
}
if((c = fp_restore.read()) == 'y')
{
fp_restore.readLine();
fp_restore.readLine();
fp_restore.readLine();
}
fp_restore.readLine();
fp_restore.readLine();
String restore_line = null;
while((restore_line = fp_restore.readLine())!=null)
{
StringTokenizer st2 = new StringTokenizer(restore_line);
idx = Integer.parseInt(st2.nextToken());
max_index = Math.max(max_index, idx);
}
fp_restore = rewind(fp_restore, restore_filename);
}
while (readline(fp) != null)
{
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
st.nextToken();
while(st.hasMoreTokens())
{
index = Integer.parseInt(st.nextToken());
max_index = Math.max(max_index, index);
st.nextToken();
num_nonzeros++;
}
}
try {
feature_max = new double[(max_index+1)];
feature_min = new double[(max_index+1)];
} catch(OutOfMemoryError e) {
System.err.println("can't allocate enough memory");
System.exit(1);
}
for(i=0;i<=max_index;i++)
{
feature_max[i] = -Double.MAX_VALUE;
feature_min[i] = Double.MAX_VALUE;
}
fp = rewind(fp, data_filename);
/* pass 2: find out min/max value */
while(readline(fp) != null)
{
int next_index = 1;
double target;
double value;
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
target = Double.parseDouble(st.nextToken());
y_max = Math.max(y_max, target);
y_min = Math.min(y_min, target);
while (st.hasMoreTokens())
{
index = Integer.parseInt(st.nextToken());
value = Double.parseDouble(st.nextToken());
for (i = next_index; i<index; i++)
{
feature_max[i] = Math.max(feature_max[i], 0);
feature_min[i] = Math.min(feature_min[i], 0);
}
feature_max[index] = Math.max(feature_max[index], value);
feature_min[index] = Math.min(feature_min[index], value);
next_index = index + 1;
}
for(i=next_index;i<=max_index;i++)
{
feature_max[i] = Math.max(feature_max[i], 0);
feature_min[i] = Math.min(feature_min[i], 0);
}
}
fp = rewind(fp, data_filename);
/* pass 2.5: save/restore feature_min/feature_max */
if(restore_filename != null)
{
			// fp_restore was rewound while finding max_index
int idx, c;
double fmin, fmax;
fp_restore.mark(2); // for reset
if((c = fp_restore.read()) == 'y')
{
fp_restore.readLine(); // pass the '\n' after 'y'
StringTokenizer st = new StringTokenizer(fp_restore.readLine());
y_lower = Double.parseDouble(st.nextToken());
y_upper = Double.parseDouble(st.nextToken());
st = new StringTokenizer(fp_restore.readLine());
y_min = Double.parseDouble(st.nextToken());
y_max = Double.parseDouble(st.nextToken());
y_scaling = true;
}
else
fp_restore.reset();
if(fp_restore.read() == 'x') {
fp_restore.readLine(); // pass the '\n' after 'x'
StringTokenizer st = new StringTokenizer(fp_restore.readLine());
lower = Double.parseDouble(st.nextToken());
upper = Double.parseDouble(st.nextToken());
String restore_line = null;
while((restore_line = fp_restore.readLine())!=null)
{
StringTokenizer st2 = new StringTokenizer(restore_line);
idx = Integer.parseInt(st2.nextToken());
fmin = Double.parseDouble(st2.nextToken());
fmax = Double.parseDouble(st2.nextToken());
if (idx <= max_index)
{
feature_min[idx] = fmin;
feature_max[idx] = fmax;
}
}
}
fp_restore.close();
}
if(save_filename != null)
{
Formatter formatter = new Formatter(new StringBuilder());
BufferedWriter fp_save = null;
try {
fp_save = new BufferedWriter(new FileWriter(save_filename));
} catch(IOException e) {
System.err.println("can't open file " + save_filename);
System.exit(1);
}
if(y_scaling)
{
formatter.format("y\n");
formatter.format("%.16g %.16g\n", y_lower, y_upper);
formatter.format("%.16g %.16g\n", y_min, y_max);
}
formatter.format("x\n");
formatter.format("%.16g %.16g\n", lower, upper);
for(i=1;i<=max_index;i++)
{
if(feature_min[i] != feature_max[i])
formatter.format("%d %.16g %.16g\n", i, feature_min[i], feature_max[i]);
}
fp_save.write(formatter.toString());
fp_save.close();
}
/* pass 3: scale */
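		// output(i, v) is defined earlier in this class; in the standard svm-scale tool it maps
		// v linearly from [feature_min[i], feature_max[i]] onto [lower, upper] and skips values
		// that rescale to exactly 0, which is why new_num_nonzeros can end up differing from
		// num_nonzeros (see the warning printed after this pass).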
while(readline(fp) != null)
{
int next_index = 1;
double target;
double value;
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
target = Double.parseDouble(st.nextToken());
output_target(target);
			while(st.hasMoreTokens())
{
index = Integer.parseInt(st.nextToken());
value = Double.parseDouble(st.nextToken());
for (i = next_index; i<index; i++)
output(i, 0);
output(index, value);
next_index = index + 1;
}
for(i=next_index;i<= max_index;i++)
output(i, 0);
System.out.print("\n");
}
if (new_num_nonzeros > num_nonzeros)
System.err.print(
"WARNING: original #nonzeros " + num_nonzeros+"\n"
+" new #nonzeros " + new_num_nonzeros+"\n"
+"Use -l 0 if many original feature values are zeros\n");
fp.close();
}
public static void main(String argv[]) throws IOException
{
svm_scale s = new svm_scale();
s.run(argv);
}
}
| 8,973 | 24.422096 | 85 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/svm_train.java | package toolbox.svm;
import libsvm.*;
import java.io.*;
import java.util.*;
public class svm_train {
private svm_parameter param; // set by parse_command_line
private svm_problem prob; // set by read_problem
private svm_model model;
private String input_file_name; // set by parse_command_line
private String model_file_name; // set by parse_command_line
private String error_msg;
private int cross_validation;
private int nr_fold;
public static svm_print_interface svm_print_null = new svm_print_interface()
{
public void print(String s) {}
};
public static void exit_with_help()
{
System.out.print(
"Usage: svm_train [options] training_set_file [model_file]\n"
+"options:\n"
+"-s svm_type : set type of SVM (default 0)\n"
+" 0 -- C-SVC (multi-class classification)\n"
+" 1 -- nu-SVC (multi-class classification)\n"
+" 2 -- one-class SVM\n"
+" 3 -- epsilon-SVR (regression)\n"
+" 4 -- nu-SVR (regression)\n"
+"-t kernel_type : set type of kernel function (default 2)\n"
+" 0 -- linear: u'*v\n"
+" 1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
+" 2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
+" 3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
+" 4 -- precomputed kernel (kernel values in training_set_file)\n"
+"-d degree : set degree in kernel function (default 3)\n"
+"-g gamma : set gamma in kernel function (default 1/num_features)\n"
+"-r coef0 : set coef0 in kernel function (default 0)\n"
+"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
+"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
+"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
+"-m cachesize : set cache memory size in MB (default 100)\n"
+"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
+"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
+"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
+"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
+"-v n : n-fold cross validation mode\n"
+"-q : quiet mode (no outputs)\n"
);
System.exit(1);
}
public void do_cross_validation()
{
int i;
int total_correct = 0;
double total_error = 0;
double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
double[] target = new double[prob.l];
svm.svm_cross_validation(prob,param,nr_fold,target);
if(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
for(i=0;i<prob.l;i++)
{
double y = prob.y[i];
double v = target[i];
total_error += (v-y)*(v-y);
sumv += v;
sumy += y;
sumvv += v*v;
sumyy += y*y;
sumvy += v*y;
}
System.out.print("Cross Validation Mean squared error = "+total_error/prob.l+"\n");
System.out.print("Cross Validation Squared correlation coefficient = "+
((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))+"\n"
);
}
else
{
for(i=0;i<prob.l;i++)
if(target[i] == prob.y[i])
++total_correct;
System.out.print("Cross Validation Accuracy = "+100.0*total_correct/prob.l+"%\n");
}
}
public void run(String argv[]) throws IOException
{
parse_command_line(argv);
read_problem();
error_msg = svm.svm_check_parameter(prob,param);
if(error_msg != null)
{
System.err.print("ERROR: "+error_msg+"\n");
System.exit(1);
}
if(cross_validation != 0)
{
do_cross_validation();
}
else
{
model = svm.svm_train(prob,param);
svm.svm_save_model(model_file_name,model);
}
}
public static void main(String argv[]) throws IOException
{
svm_train t = new svm_train();
t.run(argv);
}
public static double atof(String s)
{
double d = Double.valueOf(s).doubleValue();
if (Double.isNaN(d) || Double.isInfinite(d))
{
System.err.print("NaN or Infinity in input\n");
System.exit(1);
}
return(d);
}
public static int atoi(String s)
{
return Integer.parseInt(s);
}
public void parse_command_line(String argv[])
{
int i;
svm_print_interface print_func = null; // default printing to stdout
param = new svm_parameter();
// default values
param.svm_type = svm_parameter.C_SVC;
param.kernel_type = svm_parameter.RBF;
param.degree = 3;
param.gamma = 0; // 1/num_features
param.coef0 = 0;
param.nu = 0.5;
param.cache_size = 100;
param.C = 1;
param.eps = 1e-3;
param.p = 0.1;
param.shrinking = 1;
param.probability = 0;
param.nr_weight = 0;
param.weight_label = new int[0];
param.weight = new double[0];
cross_validation = 0;
// parse options
for(i=0;i<argv.length;i++)
{
if(argv[i].charAt(0) != '-') break;
if(++i>=argv.length)
exit_with_help();
switch(argv[i-1].charAt(1))
{
case 's':
param.svm_type = atoi(argv[i]);
break;
case 't':
param.kernel_type = atoi(argv[i]);
break;
case 'd':
param.degree = atoi(argv[i]);
break;
case 'g':
param.gamma = atof(argv[i]);
break;
case 'r':
param.coef0 = atof(argv[i]);
break;
case 'n':
param.nu = atof(argv[i]);
break;
case 'm':
param.cache_size = atof(argv[i]);
break;
case 'c':
param.C = atof(argv[i]);
break;
case 'e':
param.eps = atof(argv[i]);
break;
case 'p':
param.p = atof(argv[i]);
break;
case 'h':
param.shrinking = atoi(argv[i]);
break;
case 'b':
param.probability = atoi(argv[i]);
break;
case 'q':
print_func = svm_print_null;
i--;
break;
case 'v':
cross_validation = 1;
nr_fold = atoi(argv[i]);
if(nr_fold < 2)
{
System.err.print("n-fold cross validation: n must >= 2\n");
exit_with_help();
}
break;
case 'w':
++param.nr_weight;
{
int[] old = param.weight_label;
param.weight_label = new int[param.nr_weight];
System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
}
{
double[] old = param.weight;
param.weight = new double[param.nr_weight];
System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
}
param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
param.weight[param.nr_weight-1] = atof(argv[i]);
break;
default:
System.err.print("Unknown option: " + argv[i-1] + "\n");
exit_with_help();
}
}
svm.svm_set_print_string_function(print_func);
// determine filenames
if(i>=argv.length)
exit_with_help();
input_file_name = argv[i];
if(i<argv.length-1)
model_file_name = argv[i+1];
else
{
int p = argv[i].lastIndexOf('/');
++p; // whew...
model_file_name = argv[i].substring(p)+".model";
}
}
// read in a problem (in svmlight format)
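	// Each input line has the LIBSVM/svmlight form "<label> <index>:<value> <index>:<value> ...",
	// with feature indices in ascending order, e.g. "+1 1:0.5 3:-1.2 10:0.25".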
public void read_problem() throws IOException
{
BufferedReader fp = new BufferedReader(new FileReader(input_file_name));
Vector<Double> vy = new Vector<Double>();
Vector<svm_node[]> vx = new Vector<svm_node[]>();
int max_index = 0;
while(true)
{
String line = fp.readLine();
if(line == null) break;
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
vy.addElement(atof(st.nextToken()));
int m = st.countTokens()/2;
svm_node[] x = new svm_node[m];
for(int j=0;j<m;j++)
{
x[j] = new svm_node();
x[j].index = atoi(st.nextToken());
x[j].value = atof(st.nextToken());
}
if(m>0) max_index = Math.max(max_index, x[m-1].index);
vx.addElement(x);
}
prob = new svm_problem();
prob.l = vy.size();
prob.x = new svm_node[prob.l][];
for(int i=0;i<prob.l;i++)
prob.x[i] = vx.elementAt(i);
prob.y = new double[prob.l];
for(int i=0;i<prob.l;i++)
prob.y[i] = vy.elementAt(i);
if(param.gamma == 0 && max_index > 0)
param.gamma = 1.0/max_index;
if(param.kernel_type == svm_parameter.PRECOMPUTED)
for(int i=0;i<prob.l;i++)
{
if (prob.x[i][0].index != 0)
{
System.err.print("Wrong kernel matrix: first column must be 0:sample_serial_number\n");
System.exit(1);
}
if ((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
{
System.err.print("Wrong input format: sample_serial_number out of range\n");
System.exit(1);
}
}
fp.close();
}
}
| 8,376 | 25.096573 | 115 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/svm_predict.java | package toolbox.svm;
import libsvm.*;
import java.io.*;
import java.util.*;
public class svm_predict {
public static svm_print_interface svm_print_null = new svm_print_interface()
{
public void print(String s) {}
};
public static svm_print_interface svm_print_stdout = new svm_print_interface()
{
public void print(String s)
{
System.out.print(s);
}
};
public static svm_print_interface svm_print_string = svm_print_stdout;
static void info(String s)
{
svm_print_string.print(s);
}
public static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
public static int atoi(String s)
{
return Integer.parseInt(s);
}
public static void predict(BufferedReader input, DataOutputStream output, svm_model model, int predict_probability) throws IOException
{
int correct = 0;
int total = 0;
double error = 0;
double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
int svm_type=svm.svm_get_svm_type(model);
int nr_class=svm.svm_get_nr_class(model);
double[] prob_estimates=null;
if(predict_probability == 1)
{
if(svm_type == svm_parameter.EPSILON_SVR ||
svm_type == svm_parameter.NU_SVR)
{
svm_predict.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+svm.svm_get_svr_probability(model)+"\n");
}
else
{
int[] labels=new int[nr_class];
svm.svm_get_labels(model,labels);
prob_estimates = new double[nr_class];
output.writeBytes("labels");
for(int j=0;j<nr_class;j++)
output.writeBytes(" "+labels[j]);
output.writeBytes("\n");
}
}
while(true)
{
String line = input.readLine();
if(line == null) break;
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
double target = atof(st.nextToken());
int m = st.countTokens()/2;
svm_node[] x = new svm_node[m];
for(int j=0;j<m;j++)
{
x[j] = new svm_node();
x[j].index = atoi(st.nextToken());
x[j].value = atof(st.nextToken());
}
double v;
if (predict_probability==1 && (svm_type==svm_parameter.C_SVC || svm_type==svm_parameter.NU_SVC))
{
v = svm.svm_predict_probability(model,x,prob_estimates);
output.writeBytes(v+" ");
for(int j=0;j<nr_class;j++)
output.writeBytes(prob_estimates[j]+" ");
output.writeBytes("\n");
}
else
{
v = svm.svm_predict(model,x);
output.writeBytes(v+"\n");
}
if(v == target)
++correct;
error += (v-target)*(v-target);
sumv += v;
sumy += target;
sumvv += v*v;
sumyy += target*target;
sumvy += v*target;
++total;
}
if(svm_type == svm_parameter.EPSILON_SVR ||
svm_type == svm_parameter.NU_SVR)
{
svm_predict.info("Mean squared error = "+error/total+" (regression)\n");
svm_predict.info("Squared correlation coefficient = "+
((total*sumvy-sumv*sumy)*(total*sumvy-sumv*sumy))/
((total*sumvv-sumv*sumv)*(total*sumyy-sumy*sumy))+
" (regression)\n");
}
else
svm_predict.info("Accuracy = "+(double)correct/total*100+
"% ("+correct+"/"+total+") (classification)\n");
}
public static void exit_with_help()
{
System.err.print("usage: svm_predict [options] test_file model_file output_file\n"
+"options:\n"
+"-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n"
+"-q : quiet mode (no outputs)\n");
System.exit(1);
}
public static void main(String argv[]) throws IOException
{
int i, predict_probability=0;
svm_print_string = svm_print_stdout;
// parse options
for(i=0;i<argv.length;i++)
{
if(argv[i].charAt(0) != '-') break;
++i;
switch(argv[i-1].charAt(1))
{
case 'b':
predict_probability = atoi(argv[i]);
break;
case 'q':
svm_print_string = svm_print_null;
i--;
break;
default:
System.err.print("Unknown option: " + argv[i-1] + "\n");
exit_with_help();
}
}
if(i>=argv.length-2)
exit_with_help();
try
{
BufferedReader input = new BufferedReader(new FileReader(argv[i]));
DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(argv[i+2])));
svm_model model = svm.svm_load_model(argv[i+1]);
if (model == null)
{
System.err.print("can't open model file "+argv[i+1]+"\n");
System.exit(1);
}
if(predict_probability == 1)
{
if(svm.svm_check_probability_model(model)==0)
{
System.err.print("Model does not support probabiliy estimates\n");
System.exit(1);
}
}
else
{
if(svm.svm_check_probability_model(model)!=0)
{
svm_predict.info("Model supports probability estimates, but disabled in prediction.\n");
}
}
predict(input,output,model,predict_probability);
input.close();
output.close();
}
catch(FileNotFoundException e)
{
exit_with_help();
}
catch(ArrayIndexOutOfBoundsException e)
{
exit_with_help();
}
}
}
| 4,972 | 24.372449 | 183 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm.java | package toolbox.svm.libsvm;
import java.io.*;
import java.util.*;
//
// Kernel Cache
//
// l is the number of total data items
// size is the cache size limit in bytes
//
class Cache {
private final int l;
private long size;
private final class head_t
{
		head_t prev, next;	// a circular list
float[] data;
int len; // data[0,len) is cached in this entry
}
private final head_t[] head;
private head_t lru_head;
Cache(int l_, long size_)
{
l = l_;
size = size_;
head = new head_t[l];
for(int i=0;i<l;i++) head[i] = new head_t();
size /= 4;
size -= l * (16/4); // sizeof(head_t) == 16
size = Math.max(size, 2* (long) l); // cache must be large enough for two columns
lru_head = new head_t();
lru_head.next = lru_head.prev = lru_head;
}
private void lru_delete(head_t h)
{
// delete from current location
h.prev.next = h.next;
h.next.prev = h.prev;
}
private void lru_insert(head_t h)
{
// insert to last position
h.next = lru_head;
h.prev = lru_head.prev;
h.prev.next = h;
h.next.prev = h;
}
// request data [0,len)
// return some position p where [p,len) need to be filled
// (p >= len if nothing needs to be filled)
// java: simulate pointer using single-element array
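	// Typical caller pattern (see SVC_Q.get_Q further below):
	//   float[][] data = new float[1][];
	//   int start = cache.get_data(i, data, len);
	//   for (int j = start; j < len; j++) data[0][j] = ...;  // compute only the uncached tail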
int get_data(int index, float[][] data, int len)
{
head_t h = head[index];
if(h.len > 0) lru_delete(h);
int more = len - h.len;
if(more > 0)
{
// free old space
while(size < more)
{
head_t old = lru_head.next;
lru_delete(old);
size += old.len;
old.data = null;
old.len = 0;
}
// allocate new space
float[] new_data = new float[len];
if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
h.data = new_data;
size -= more;
do {int _=h.len; h.len=len; len=_;} while(false);
}
lru_insert(h);
data[0] = h.data;
return len;
}
void swap_index(int i, int j)
{
if(i==j) return;
if(head[i].len > 0) lru_delete(head[i]);
if(head[j].len > 0) lru_delete(head[j]);
do {float[] _=head[i].data; head[i].data=head[j].data; head[j].data=_;} while(false);
do {int _=head[i].len; head[i].len=head[j].len; head[j].len=_;} while(false);
if(head[i].len > 0) lru_insert(head[i]);
if(head[j].len > 0) lru_insert(head[j]);
if(i>j) do {int _=i; i=j; j=_;} while(false);
for(head_t h = lru_head.next; h!=lru_head; h=h.next)
{
if(h.len > i)
{
if(h.len > j)
do {float _=h.data[i]; h.data[i]=h.data[j]; h.data[j]=_;} while(false);
else
{
// give up
lru_delete(h);
size += h.len;
h.data = null;
h.len = 0;
}
}
}
}
}
//
// Kernel evaluation
//
// the static method k_function is for doing single kernel evaluation
// the constructor of Kernel prepares to calculate the l*l kernel matrix
// the member function get_Q is for getting one column from the Q Matrix
//
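// QMatrix is the abstraction the solvers work against; Kernel implements the kernel values
// themselves, and the concrete SVC_Q / ONE_CLASS_Q / SVR_Q classes further below add the
// formulation-specific signs, caching and index bookkeeping.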
abstract class QMatrix {
abstract float[] get_Q(int column, int len);
abstract double[] get_QD();
abstract void swap_index(int i, int j);
};
abstract class Kernel extends QMatrix {
private svm_node[][] x;
private final double[] x_square;
// svm_parameter
private final int kernel_type;
private final int degree;
private final double gamma;
private final double coef0;
abstract float[] get_Q(int column, int len);
abstract double[] get_QD();
void swap_index(int i, int j)
{
do {svm_node[] _=x[i]; x[i]=x[j]; x[j]=_;} while(false);
if(x_square != null) do {double _=x_square[i]; x_square[i]=x_square[j]; x_square[j]=_;} while(false);
}
private static double powi(double base, int times)
{
double tmp = base, ret = 1.0;
for(int t=times; t>0; t/=2)
{
if(t%2==1) ret*=tmp;
tmp = tmp * tmp;
}
return ret;
}
double kernel_function(int i, int j)
{
switch(kernel_type)
{
case svm_parameter.LINEAR:
return dot(x[i],x[j]);
case svm_parameter.POLY:
return powi(gamma*dot(x[i],x[j])+coef0,degree);
case svm_parameter.RBF:
return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
case svm_parameter.SIGMOID:
return Math.tanh(gamma*dot(x[i],x[j])+coef0);
case svm_parameter.PRECOMPUTED:
return x[i][(int)(x[j][0].value)].value;
default:
return 0; // java
}
}
Kernel(int l, svm_node[][] x_, svm_parameter param)
{
this.kernel_type = param.kernel_type;
this.degree = param.degree;
this.gamma = param.gamma;
this.coef0 = param.coef0;
x = (svm_node[][])x_.clone();
if(kernel_type == svm_parameter.RBF)
{
x_square = new double[l];
for(int i=0;i<l;i++)
x_square[i] = dot(x[i],x[i]);
}
else x_square = null;
}
static double dot(svm_node[] x, svm_node[] y)
{
double sum = 0;
int xlen = x.length;
int ylen = y.length;
int i = 0;
int j = 0;
while(i < xlen && j < ylen)
{
if(x[i].index == y[j].index)
sum += x[i++].value * y[j++].value;
else
{
if(x[i].index > y[j].index)
++j;
else
++i;
}
}
return sum;
}
static double k_function(svm_node[] x, svm_node[] y,
svm_parameter param)
{
switch(param.kernel_type)
{
case svm_parameter.LINEAR:
return dot(x,y);
case svm_parameter.POLY:
return powi(param.gamma*dot(x,y)+param.coef0,param.degree);
case svm_parameter.RBF:
{
double sum = 0;
int xlen = x.length;
int ylen = y.length;
int i = 0;
int j = 0;
while(i < xlen && j < ylen)
{
if(x[i].index == y[j].index)
{
double d = x[i++].value - y[j++].value;
sum += d*d;
}
else if(x[i].index > y[j].index)
{
sum += y[j].value * y[j].value;
++j;
}
else
{
sum += x[i].value * x[i].value;
++i;
}
}
while(i < xlen)
{
sum += x[i].value * x[i].value;
++i;
}
while(j < ylen)
{
sum += y[j].value * y[j].value;
++j;
}
return Math.exp(-param.gamma*sum);
}
case svm_parameter.SIGMOID:
return Math.tanh(param.gamma*dot(x,y)+param.coef0);
case svm_parameter.PRECOMPUTED:
return x[(int)(y[0].value)].value;
default:
return 0; // java
}
}
}
// An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918
// Solves:
//
// min 0.5(\alpha^T Q \alpha) + p^T \alpha
// subject to
// y^T \alpha = \delta
// y_i = +1 or -1
// 0 <= alpha_i <= Cp for y_i = 1
// 0 <= alpha_i <= Cn for y_i = -1
//
// Given:
//
// Q, p, y, Cp, Cn, and an initial feasible point \alpha
// l is the size of vectors and matrices
// eps is the stopping tolerance
//
// solution will be put in \alpha, objective value will be put in obj
//
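// Working-set selection uses the second-order rule from the paper above: i maximises the
// violation -y_i*grad(f)_i over I_up(\alpha), and j is picked from I_low(\alpha) to maximise
// the second-order estimate of the objective decrease for the pair (i,j); see select_working_set().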
class Solver {
int active_size;
byte[] y;
double[] G; // gradient of objective function
static final byte LOWER_BOUND = 0;
static final byte UPPER_BOUND = 1;
static final byte FREE = 2;
byte[] alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE
double[] alpha;
QMatrix Q;
double[] QD;
double eps;
double Cp,Cn;
double[] p;
int[] active_set;
double[] G_bar; // gradient, if we treat free variables as 0
int l;
boolean unshrink; // XXX
static final double INF = java.lang.Double.POSITIVE_INFINITY;
double get_C(int i)
{
return (y[i] > 0)? Cp : Cn;
}
void update_alpha_status(int i)
{
if(alpha[i] >= get_C(i))
alpha_status[i] = UPPER_BOUND;
else if(alpha[i] <= 0)
alpha_status[i] = LOWER_BOUND;
else alpha_status[i] = FREE;
}
boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
boolean is_free(int i) { return alpha_status[i] == FREE; }
// java: information about solution except alpha,
// because we cannot return multiple values otherwise...
static class SolutionInfo {
double obj;
double rho;
double upper_bound_p;
double upper_bound_n;
double r; // for Solver_NU
}
void swap_index(int i, int j)
{
Q.swap_index(i,j);
do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
do {double _=G[i]; G[i]=G[j]; G[j]=_;} while(false);
do {byte _=alpha_status[i]; alpha_status[i]=alpha_status[j]; alpha_status[j]=_;} while(false);
do {double _=alpha[i]; alpha[i]=alpha[j]; alpha[j]=_;} while(false);
do {double _=p[i]; p[i]=p[j]; p[j]=_;} while(false);
do {int _=active_set[i]; active_set[i]=active_set[j]; active_set[j]=_;} while(false);
do {double _=G_bar[i]; G_bar[i]=G_bar[j]; G_bar[j]=_;} while(false);
}
void reconstruct_gradient()
{
// reconstruct inactive elements of G from G_bar and free variables
if(active_size == l) return;
int i,j;
int nr_free = 0;
for(j=active_size;j<l;j++)
G[j] = G_bar[j] + p[j];
for(j=0;j<active_size;j++)
if(is_free(j))
nr_free++;
if(2*nr_free < active_size)
svm.info("\nWARNING: using -h 0 may be faster\n");
if (nr_free*l > 2*active_size*(l-active_size))
{
for(i=active_size;i<l;i++)
{
float[] Q_i = Q.get_Q(i,active_size);
for(j=0;j<active_size;j++)
if(is_free(j))
G[i] += alpha[j] * Q_i[j];
}
}
else
{
for(i=0;i<active_size;i++)
if(is_free(i))
{
float[] Q_i = Q.get_Q(i,l);
double alpha_i = alpha[i];
for(j=active_size;j<l;j++)
G[j] += alpha_i * Q_i[j];
}
}
}
void Solve(int l, QMatrix Q, double[] p_, byte[] y_,
double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
{
this.l = l;
this.Q = Q;
QD = Q.get_QD();
p = (double[])p_.clone();
y = (byte[])y_.clone();
alpha = (double[])alpha_.clone();
this.Cp = Cp;
this.Cn = Cn;
this.eps = eps;
this.unshrink = false;
// initialize alpha_status
{
alpha_status = new byte[l];
for(int i=0;i<l;i++)
update_alpha_status(i);
}
// initialize active set (for shrinking)
{
active_set = new int[l];
for(int i=0;i<l;i++)
active_set[i] = i;
active_size = l;
}
// initialize gradient
{
G = new double[l];
G_bar = new double[l];
int i;
for(i=0;i<l;i++)
{
G[i] = p[i];
G_bar[i] = 0;
}
for(i=0;i<l;i++)
if(!is_lower_bound(i))
{
float[] Q_i = Q.get_Q(i,l);
double alpha_i = alpha[i];
int j;
for(j=0;j<l;j++)
G[j] += alpha_i*Q_i[j];
if(is_upper_bound(i))
for(j=0;j<l;j++)
G_bar[j] += get_C(i) * Q_i[j];
}
}
// optimization step
int iter = 0;
int max_iter = Math.max(10000000, l>Integer.MAX_VALUE/100 ? Integer.MAX_VALUE : 100*l);
int counter = Math.min(l,1000)+1;
int[] working_set = new int[2];
while(iter < max_iter)
{
// show progress and do shrinking
if(--counter == 0)
{
counter = Math.min(l,1000);
if(shrinking!=0) do_shrinking();
svm.info(".");
}
if(select_working_set(working_set)!=0)
{
// reconstruct the whole gradient
reconstruct_gradient();
// reset active set size and check
active_size = l;
svm.info("*");
if(select_working_set(working_set)!=0)
break;
else
counter = 1; // do shrinking next iteration
}
int i = working_set[0];
int j = working_set[1];
++iter;
// update alpha[i] and alpha[j], handle bounds carefully
float[] Q_i = Q.get_Q(i,active_size);
float[] Q_j = Q.get_Q(j,active_size);
double C_i = get_C(i);
double C_j = get_C(j);
double old_alpha_i = alpha[i];
double old_alpha_j = alpha[j];
if(y[i]!=y[j])
{
double quad_coef = QD[i]+QD[j]+2*Q_i[j];
if (quad_coef <= 0)
quad_coef = 1e-12;
double delta = (-G[i]-G[j])/quad_coef;
double diff = alpha[i] - alpha[j];
alpha[i] += delta;
alpha[j] += delta;
if(diff > 0)
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = diff;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = -diff;
}
}
if(diff > C_i - C_j)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = C_i - diff;
}
}
else
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = C_j + diff;
}
}
}
else
{
double quad_coef = QD[i]+QD[j]-2*Q_i[j];
if (quad_coef <= 0)
quad_coef = 1e-12;
double delta = (G[i]-G[j])/quad_coef;
double sum = alpha[i] + alpha[j];
alpha[i] -= delta;
alpha[j] += delta;
if(sum > C_i)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = sum - C_i;
}
}
else
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = sum;
}
}
if(sum > C_j)
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = sum - C_j;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = sum;
}
}
}
// update G
double delta_alpha_i = alpha[i] - old_alpha_i;
double delta_alpha_j = alpha[j] - old_alpha_j;
for(int k=0;k<active_size;k++)
{
G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
}
// update alpha_status and G_bar
{
boolean ui = is_upper_bound(i);
boolean uj = is_upper_bound(j);
update_alpha_status(i);
update_alpha_status(j);
int k;
if(ui != is_upper_bound(i))
{
Q_i = Q.get_Q(i,l);
if(ui)
for(k=0;k<l;k++)
G_bar[k] -= C_i * Q_i[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_i * Q_i[k];
}
if(uj != is_upper_bound(j))
{
Q_j = Q.get_Q(j,l);
if(uj)
for(k=0;k<l;k++)
G_bar[k] -= C_j * Q_j[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_j * Q_j[k];
}
}
}
if(iter >= max_iter)
{
if(active_size < l)
{
// reconstruct the whole gradient to calculate objective value
reconstruct_gradient();
active_size = l;
svm.info("*");
}
System.err.print("\nWARNING: reaching max number of iterations\n");
}
// calculate rho
si.rho = calculate_rho();
// calculate objective value
{
double v = 0;
int i;
for(i=0;i<l;i++)
v += alpha[i] * (G[i] + p[i]);
si.obj = v/2;
}
// put back the solution
{
for(int i=0;i<l;i++)
alpha_[active_set[i]] = alpha[i];
}
si.upper_bound_p = Cp;
si.upper_bound_n = Cn;
svm.info("\noptimization finished, #iter = "+iter+"\n");
}
// return 1 if already optimal, return 0 otherwise
int select_working_set(int[] working_set)
{
// return i,j such that
// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
		// j: minimizes the decrease of obj value
		// (if quadratic coefficient <= 0, replace it with tau)
// -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
double Gmax = -INF;
double Gmax2 = -INF;
int Gmax_idx = -1;
int Gmin_idx = -1;
double obj_diff_min = INF;
for(int t=0;t<active_size;t++)
if(y[t]==+1)
{
if(!is_upper_bound(t))
if(-G[t] >= Gmax)
{
Gmax = -G[t];
Gmax_idx = t;
}
}
else
{
if(!is_lower_bound(t))
if(G[t] >= Gmax)
{
Gmax = G[t];
Gmax_idx = t;
}
}
int i = Gmax_idx;
float[] Q_i = null;
if(i != -1) // null Q_i not accessed: Gmax=-INF if i=-1
Q_i = Q.get_Q(i,active_size);
for(int j=0;j<active_size;j++)
{
if(y[j]==+1)
{
if (!is_lower_bound(j))
{
double grad_diff=Gmax+G[j];
if (G[j] >= Gmax2)
Gmax2 = G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
else
{
if (!is_upper_bound(j))
{
double grad_diff= Gmax-G[j];
if (-G[j] >= Gmax2)
Gmax2 = -G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
}
if(Gmax+Gmax2 < eps)
return 1;
working_set[0] = Gmax_idx;
working_set[1] = Gmin_idx;
return 0;
}
private boolean be_shrunk(int i, double Gmax1, double Gmax2)
{
if(is_upper_bound(i))
{
if(y[i]==+1)
return(-G[i] > Gmax1);
else
return(-G[i] > Gmax2);
}
else if(is_lower_bound(i))
{
if(y[i]==+1)
return(G[i] > Gmax2);
else
return(G[i] > Gmax1);
}
else
return(false);
}
void do_shrinking()
{
int i;
double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) }
double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) }
// find maximal violating pair first
for(i=0;i<active_size;i++)
{
if(y[i]==+1)
{
if(!is_upper_bound(i))
{
if(-G[i] >= Gmax1)
Gmax1 = -G[i];
}
if(!is_lower_bound(i))
{
if(G[i] >= Gmax2)
Gmax2 = G[i];
}
}
else
{
if(!is_upper_bound(i))
{
if(-G[i] >= Gmax2)
Gmax2 = -G[i];
}
if(!is_lower_bound(i))
{
if(G[i] >= Gmax1)
Gmax1 = G[i];
}
}
}
if(unshrink == false && Gmax1 + Gmax2 <= eps*10)
{
unshrink = true;
reconstruct_gradient();
active_size = l;
}
for(i=0;i<active_size;i++)
if (be_shrunk(i, Gmax1, Gmax2))
{
active_size--;
while (active_size > i)
{
if (!be_shrunk(active_size, Gmax1, Gmax2))
{
swap_index(i,active_size);
break;
}
active_size--;
}
}
}
double calculate_rho()
{
double r;
int nr_free = 0;
double ub = INF, lb = -INF, sum_free = 0;
for(int i=0;i<active_size;i++)
{
double yG = y[i]*G[i];
if(is_lower_bound(i))
{
if(y[i] > 0)
ub = Math.min(ub,yG);
else
lb = Math.max(lb,yG);
}
else if(is_upper_bound(i))
{
if(y[i] < 0)
ub = Math.min(ub,yG);
else
lb = Math.max(lb,yG);
}
else
{
++nr_free;
sum_free += yG;
}
}
if(nr_free>0)
r = sum_free/nr_free;
else
r = (ub+lb)/2;
return r;
}
}
//
// Solver for nu-svm classification and regression
//
// additional constraint: e^T \alpha = constant
//
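// Because of the extra equality constraint, the selected working-set pair must share the same
// label (y_i == y_j), and calculate_rho() recovers two offsets: si.r (used for nu) in addition
// to the usual rho.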
final class Solver_NU extends Solver
{
private SolutionInfo si;
void Solve(int l, QMatrix Q, double[] p, byte[] y,
double[] alpha, double Cp, double Cn, double eps,
SolutionInfo si, int shrinking)
{
this.si = si;
super.Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking);
}
// return 1 if already optimal, return 0 otherwise
int select_working_set(int[] working_set)
{
// return i,j such that y_i = y_j and
// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
// j: minimizes the decrease of obj value
		// (if quadratic coefficient <= 0, replace it with tau)
// -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
double Gmaxp = -INF;
double Gmaxp2 = -INF;
int Gmaxp_idx = -1;
double Gmaxn = -INF;
double Gmaxn2 = -INF;
int Gmaxn_idx = -1;
int Gmin_idx = -1;
double obj_diff_min = INF;
for(int t=0;t<active_size;t++)
if(y[t]==+1)
{
if(!is_upper_bound(t))
if(-G[t] >= Gmaxp)
{
Gmaxp = -G[t];
Gmaxp_idx = t;
}
}
else
{
if(!is_lower_bound(t))
if(G[t] >= Gmaxn)
{
Gmaxn = G[t];
Gmaxn_idx = t;
}
}
int ip = Gmaxp_idx;
int in = Gmaxn_idx;
float[] Q_ip = null;
float[] Q_in = null;
if(ip != -1) // null Q_ip not accessed: Gmaxp=-INF if ip=-1
Q_ip = Q.get_Q(ip,active_size);
if(in != -1)
Q_in = Q.get_Q(in,active_size);
for(int j=0;j<active_size;j++)
{
if(y[j]==+1)
{
if (!is_lower_bound(j))
{
double grad_diff=Gmaxp+G[j];
if (G[j] >= Gmaxp2)
Gmaxp2 = G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[ip]+QD[j]-2*Q_ip[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
else
{
if (!is_upper_bound(j))
{
double grad_diff=Gmaxn-G[j];
if (-G[j] >= Gmaxn2)
Gmaxn2 = -G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[in]+QD[j]-2*Q_in[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
}
if(Math.max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps)
return 1;
if(y[Gmin_idx] == +1)
working_set[0] = Gmaxp_idx;
else
working_set[0] = Gmaxn_idx;
working_set[1] = Gmin_idx;
return 0;
}
private boolean be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4)
{
if(is_upper_bound(i))
{
if(y[i]==+1)
return(-G[i] > Gmax1);
else
return(-G[i] > Gmax4);
}
else if(is_lower_bound(i))
{
if(y[i]==+1)
return(G[i] > Gmax2);
else
return(G[i] > Gmax3);
}
else
return(false);
}
void do_shrinking()
{
double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) }
double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) }
double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) }
double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) }
// find maximal violating pair first
int i;
for(i=0;i<active_size;i++)
{
if(!is_upper_bound(i))
{
if(y[i]==+1)
{
if(-G[i] > Gmax1) Gmax1 = -G[i];
}
else if(-G[i] > Gmax4) Gmax4 = -G[i];
}
if(!is_lower_bound(i))
{
if(y[i]==+1)
{
if(G[i] > Gmax2) Gmax2 = G[i];
}
else if(G[i] > Gmax3) Gmax3 = G[i];
}
}
if(unshrink == false && Math.max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10)
{
unshrink = true;
reconstruct_gradient();
active_size = l;
}
for(i=0;i<active_size;i++)
if (be_shrunk(i, Gmax1, Gmax2, Gmax3, Gmax4))
{
active_size--;
while (active_size > i)
{
if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4))
{
swap_index(i,active_size);
break;
}
active_size--;
}
}
}
double calculate_rho()
{
int nr_free1 = 0,nr_free2 = 0;
double ub1 = INF, ub2 = INF;
double lb1 = -INF, lb2 = -INF;
double sum_free1 = 0, sum_free2 = 0;
for(int i=0;i<active_size;i++)
{
if(y[i]==+1)
{
if(is_lower_bound(i))
ub1 = Math.min(ub1,G[i]);
else if(is_upper_bound(i))
lb1 = Math.max(lb1,G[i]);
else
{
++nr_free1;
sum_free1 += G[i];
}
}
else
{
if(is_lower_bound(i))
ub2 = Math.min(ub2,G[i]);
else if(is_upper_bound(i))
lb2 = Math.max(lb2,G[i]);
else
{
++nr_free2;
sum_free2 += G[i];
}
}
}
double r1,r2;
if(nr_free1 > 0)
r1 = sum_free1/nr_free1;
else
r1 = (ub1+lb1)/2;
if(nr_free2 > 0)
r2 = sum_free2/nr_free2;
else
r2 = (ub2+lb2)/2;
si.r = (r1+r2)/2;
return (r1-r2)/2;
}
}
//
// Q matrices for various formulations
//
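// SVC_Q:       Q_ij = y_i * y_j * K(x_i, x_j)   (C-SVC / nu-SVC)
// ONE_CLASS_Q: Q_ij = K(x_i, x_j)               (one-class SVM)
// SVR_Q:       2l x 2l matrix over the doubled variable set used by epsilon-SVR / nu-SVR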
class SVC_Q extends Kernel
{
private final byte[] y;
private final Cache cache;
private final double[] QD;
SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
{
super(prob.l, prob.x, param);
y = (byte[])y_.clone();
cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
QD = new double[prob.l];
for(int i=0;i<prob.l;i++)
QD[i] = kernel_function(i,i);
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int start, j;
if((start = cache.get_data(i,data,len)) < len)
{
for(j=start;j<len;j++)
data[0][j] = (float)(y[i]*y[j]*kernel_function(i,j));
}
return data[0];
}
double[] get_QD()
{
return QD;
}
void swap_index(int i, int j)
{
cache.swap_index(i,j);
super.swap_index(i,j);
do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
}
class ONE_CLASS_Q extends Kernel
{
private final Cache cache;
private final double[] QD;
ONE_CLASS_Q(svm_problem prob, svm_parameter param)
{
super(prob.l, prob.x, param);
cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
QD = new double[prob.l];
for(int i=0;i<prob.l;i++)
QD[i] = kernel_function(i,i);
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int start, j;
if((start = cache.get_data(i,data,len)) < len)
{
for(j=start;j<len;j++)
data[0][j] = (float)kernel_function(i,j);
}
return data[0];
}
double[] get_QD()
{
return QD;
}
void swap_index(int i, int j)
{
cache.swap_index(i,j);
super.swap_index(i,j);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
}
class SVR_Q extends Kernel
{
private final int l;
private final Cache cache;
private final byte[] sign;
private final int[] index;
private int next_buffer;
private float[][] buffer;
private final double[] QD;
SVR_Q(svm_problem prob, svm_parameter param)
{
super(prob.l, prob.x, param);
l = prob.l;
cache = new Cache(l,(long)(param.cache_size*(1<<20)));
QD = new double[2*l];
sign = new byte[2*l];
index = new int[2*l];
for(int k=0;k<l;k++)
{
sign[k] = 1;
sign[k+l] = -1;
index[k] = k;
index[k+l] = k;
QD[k] = kernel_function(k,k);
QD[k+l] = QD[k];
}
buffer = new float[2][2*l];
next_buffer = 0;
}
void swap_index(int i, int j)
{
do {byte _=sign[i]; sign[i]=sign[j]; sign[j]=_;} while(false);
do {int _=index[i]; index[i]=index[j]; index[j]=_;} while(false);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int j, real_i = index[i];
if(cache.get_data(real_i,data,l) < l)
{
for(j=0;j<l;j++)
data[0][j] = (float)kernel_function(real_i,j);
}
// reorder and copy
float buf[] = buffer[next_buffer];
next_buffer = 1 - next_buffer;
byte si = sign[i];
for(j=0;j<len;j++)
buf[j] = (float) si * sign[j] * data[0][index[j]];
return buf;
}
double[] get_QD()
{
return QD;
}
}
public class svm {
//
// construct and solve various formulations
//
public static final int LIBSVM_VERSION=318;
public static final Random rand = new Random();
private static svm_print_interface svm_print_stdout = new svm_print_interface()
{
public void print(String s)
{
System.out.print(s);
System.out.flush();
}
};
private static svm_print_interface svm_print_string = svm_print_stdout;
static void info(String s)
{
svm_print_string.print(s);
}
private static void solve_c_svc(svm_problem prob, svm_parameter param,
double[] alpha, Solver.SolutionInfo si,
double Cp, double Cn)
{
int l = prob.l;
double[] minus_ones = new double[l];
byte[] y = new byte[l];
int i;
for(i=0;i<l;i++)
{
alpha[i] = 0;
minus_ones[i] = -1;
if(prob.y[i] > 0) y[i] = +1; else y[i] = -1;
}
Solver s = new Solver();
s.Solve(l, new SVC_Q(prob,param,y), minus_ones, y,
alpha, Cp, Cn, param.eps, si, param.shrinking);
double sum_alpha=0;
for(i=0;i<l;i++)
sum_alpha += alpha[i];
if (Cp==Cn)
svm.info("nu = "+sum_alpha/(Cp*prob.l)+"\n");
for(i=0;i<l;i++)
alpha[i] *= y[i];
}
private static void solve_nu_svc(svm_problem prob, svm_parameter param,
double[] alpha, Solver.SolutionInfo si)
{
int i;
int l = prob.l;
double nu = param.nu;
byte[] y = new byte[l];
for(i=0;i<l;i++)
if(prob.y[i]>0)
y[i] = +1;
else
y[i] = -1;
double sum_pos = nu*l/2;
double sum_neg = nu*l/2;
for(i=0;i<l;i++)
if(y[i] == +1)
{
alpha[i] = Math.min(1.0,sum_pos);
sum_pos -= alpha[i];
}
else
{
alpha[i] = Math.min(1.0,sum_neg);
sum_neg -= alpha[i];
}
double[] zeros = new double[l];
for(i=0;i<l;i++)
zeros[i] = 0;
Solver_NU s = new Solver_NU();
s.Solve(l, new SVC_Q(prob,param,y), zeros, y,
alpha, 1.0, 1.0, param.eps, si, param.shrinking);
double r = si.r;
svm.info("C = "+1/r+"\n");
for(i=0;i<l;i++)
alpha[i] *= y[i]/r;
si.rho /= r;
si.obj /= (r*r);
si.upper_bound_p = 1/r;
si.upper_bound_n = 1/r;
}
private static void solve_one_class(svm_problem prob, svm_parameter param,
double[] alpha, Solver.SolutionInfo si)
{
int l = prob.l;
double[] zeros = new double[l];
byte[] ones = new byte[l];
int i;
int n = (int)(param.nu*prob.l); // # of alpha's at upper bound
for(i=0;i<n;i++)
alpha[i] = 1;
if(n<prob.l)
alpha[n] = param.nu * prob.l - n;
for(i=n+1;i<l;i++)
alpha[i] = 0;
for(i=0;i<l;i++)
{
zeros[i] = 0;
ones[i] = 1;
}
Solver s = new Solver();
s.Solve(l, new ONE_CLASS_Q(prob,param), zeros, ones,
alpha, 1.0, 1.0, param.eps, si, param.shrinking);
}
private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
double[] alpha, Solver.SolutionInfo si)
{
int l = prob.l;
double[] alpha2 = new double[2*l];
double[] linear_term = new double[2*l];
byte[] y = new byte[2*l];
int i;
for(i=0;i<l;i++)
{
alpha2[i] = 0;
linear_term[i] = param.p - prob.y[i];
y[i] = 1;
alpha2[i+l] = 0;
linear_term[i+l] = param.p + prob.y[i];
y[i+l] = -1;
}
Solver s = new Solver();
s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
alpha2, param.C, param.C, param.eps, si, param.shrinking);
double sum_alpha = 0;
for(i=0;i<l;i++)
{
alpha[i] = alpha2[i] - alpha2[i+l];
sum_alpha += Math.abs(alpha[i]);
}
svm.info("nu = "+sum_alpha/(param.C*l)+"\n");
}
private static void solve_nu_svr(svm_problem prob, svm_parameter param,
double[] alpha, Solver.SolutionInfo si)
{
int l = prob.l;
double C = param.C;
double[] alpha2 = new double[2*l];
double[] linear_term = new double[2*l];
byte[] y = new byte[2*l];
int i;
double sum = C * param.nu * l / 2;
for(i=0;i<l;i++)
{
alpha2[i] = alpha2[i+l] = Math.min(sum,C);
sum -= alpha2[i];
linear_term[i] = - prob.y[i];
y[i] = 1;
linear_term[i+l] = prob.y[i];
y[i+l] = -1;
}
Solver_NU s = new Solver_NU();
s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
alpha2, C, C, param.eps, si, param.shrinking);
svm.info("epsilon = "+(-si.r)+"\n");
for(i=0;i<l;i++)
alpha[i] = alpha2[i] - alpha2[i+l];
}
//
// decision_function
//
static class decision_function
{
double[] alpha;
double rho;
};
static decision_function svm_train_one(
svm_problem prob, svm_parameter param,
double Cp, double Cn)
{
double[] alpha = new double[prob.l];
Solver.SolutionInfo si = new Solver.SolutionInfo();
switch(param.svm_type)
{
case svm_parameter.C_SVC:
solve_c_svc(prob,param,alpha,si,Cp,Cn);
break;
case svm_parameter.NU_SVC:
solve_nu_svc(prob,param,alpha,si);
break;
case svm_parameter.ONE_CLASS:
solve_one_class(prob,param,alpha,si);
break;
case svm_parameter.EPSILON_SVR:
solve_epsilon_svr(prob,param,alpha,si);
break;
case svm_parameter.NU_SVR:
solve_nu_svr(prob,param,alpha,si);
break;
}
svm.info("obj = "+si.obj+", rho = "+si.rho+"\n");
// output SVs
int nSV = 0;
int nBSV = 0;
for(int i=0;i<prob.l;i++)
{
if(Math.abs(alpha[i]) > 0)
{
++nSV;
if(prob.y[i] > 0)
{
if(Math.abs(alpha[i]) >= si.upper_bound_p)
++nBSV;
}
else
{
if(Math.abs(alpha[i]) >= si.upper_bound_n)
++nBSV;
}
}
}
svm.info("nSV = "+nSV+", nBSV = "+nBSV+"\n");
decision_function f = new decision_function();
f.alpha = alpha;
f.rho = si.rho;
return f;
}
	// Platt's binary SVM Probabilistic Output: an improvement by Lin et al.
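	// Fits A and B of the sigmoid P(y=1|f) = 1 / (1 + exp(A*f + B)) to the decision values
	// with a Newton iteration plus backtracking line search; the regularised targets
	// hiTarget/loTarget below avoid the degenerate 0/1 labels of plain Platt scaling.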
private static void sigmoid_train(int l, double[] dec_values, double[] labels,
double[] probAB)
{
double A, B;
double prior1=0, prior0 = 0;
int i;
for (i=0;i<l;i++)
if (labels[i] > 0) prior1+=1;
else prior0+=1;
int max_iter=100; // Maximal number of iterations
double min_step=1e-10; // Minimal step taken in line search
double sigma=1e-12; // For numerically strict PD of Hessian
double eps=1e-5;
double hiTarget=(prior1+1.0)/(prior1+2.0);
double loTarget=1/(prior0+2.0);
double[] t= new double[l];
double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
double newA,newB,newf,d1,d2;
int iter;
// Initial Point and Initial Fun Value
A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
double fval = 0.0;
for (i=0;i<l;i++)
{
if (labels[i]>0) t[i]=hiTarget;
else t[i]=loTarget;
fApB = dec_values[i]*A+B;
if (fApB>=0)
fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
for (iter=0;iter<max_iter;iter++)
{
// Update Gradient and Hessian (use H' = H + sigma I)
h11=sigma; // numerically ensures strict PD
h22=sigma;
h21=0.0;g1=0.0;g2=0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*A+B;
if (fApB >= 0)
{
p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
q=1.0/(1.0+Math.exp(-fApB));
}
else
{
p=1.0/(1.0+Math.exp(fApB));
q=Math.exp(fApB)/(1.0+Math.exp(fApB));
}
d2=p*q;
h11+=dec_values[i]*dec_values[i]*d2;
h22+=d2;
h21+=dec_values[i]*d2;
d1=t[i]-p;
g1+=dec_values[i]*d1;
g2+=d1;
}
// Stopping Criteria
if (Math.abs(g1)<eps && Math.abs(g2)<eps)
break;
// Finding Newton direction: -inv(H') * g
det=h11*h22-h21*h21;
dA=-(h22*g1 - h21 * g2) / det;
dB=-(-h21*g1+ h11 * g2) / det;
gd=g1*dA+g2*dB;
stepsize = 1; // Line Search
while (stepsize >= min_step)
{
newA = A + stepsize * dA;
newB = B + stepsize * dB;
// New function value
newf = 0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*newA+newB;
if (fApB >= 0)
newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
// Check sufficient decrease
if (newf<fval+0.0001*stepsize*gd)
{
A=newA;B=newB;fval=newf;
break;
}
else
stepsize = stepsize / 2.0;
}
if (stepsize < min_step)
{
svm.info("Line search fails in two-class probability estimates\n");
break;
}
}
if (iter>=max_iter)
svm.info("Reaching maximal iterations in two-class probability estimates\n");
probAB[0]=A;probAB[1]=B;
}
private static double sigmoid_predict(double decision_value, double A, double B)
{
double fApB = decision_value*A+B;
if (fApB >= 0)
return Math.exp(-fApB)/(1.0+Math.exp(-fApB));
else
return 1.0/(1+Math.exp(fApB)) ;
}
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
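	// Couples the pairwise estimates r[i][j] ~ P(y=i | y=i or j, x) into one probability
	// vector p by minimising sum_i sum_{j!=i} (r[j][i]*p[i] - r[i][j]*p[j])^2 subject to
	// sum(p)=1, using the simple iterative update below.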
private static void multiclass_probability(int k, double[][] r, double[] p)
{
int t,j;
int iter = 0, max_iter=Math.max(100,k);
double[][] Q=new double[k][k];
double[] Qp=new double[k];
double pQp, eps=0.005/k;
for (t=0;t<k;t++)
{
p[t]=1.0/k; // Valid if k = 1
Q[t][t]=0;
for (j=0;j<t;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=Q[j][t];
}
for (j=t+1;j<k;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=-r[j][t]*r[t][j];
}
}
for (iter=0;iter<max_iter;iter++)
{
// stopping condition, recalculate QP,pQP for numerical accuracy
pQp=0;
for (t=0;t<k;t++)
{
Qp[t]=0;
for (j=0;j<k;j++)
Qp[t]+=Q[t][j]*p[j];
pQp+=p[t]*Qp[t];
}
double max_error=0;
for (t=0;t<k;t++)
{
double error=Math.abs(Qp[t]-pQp);
if (error>max_error)
max_error=error;
}
if (max_error<eps) break;
for (t=0;t<k;t++)
{
double diff=(-Qp[t]+pQp)/Q[t][t];
p[t]+=diff;
pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
for (j=0;j<k;j++)
{
Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
p[j]/=(1+diff);
}
}
}
if (iter>=max_iter)
svm.info("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
int i;
int nr_fold = 5;
int[] perm = new int[prob.l];
double[] dec_values = new double[prob.l];
// random shuffle
for(i=0;i<prob.l;i++) perm[i]=i;
for(i=0;i<prob.l;i++)
{
int j = i+rand.nextInt(prob.l-i);
do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
}
for(i=0;i<nr_fold;i++)
{
int begin = i*prob.l/nr_fold;
int end = (i+1)*prob.l/nr_fold;
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = prob.l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<prob.l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
int p_count=0,n_count=0;
for(j=0;j<k;j++)
if(subprob.y[j]>0)
p_count++;
else
n_count++;
if(p_count==0 && n_count==0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 0;
else if(p_count > 0 && n_count == 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 1;
else if(p_count == 0 && n_count > 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = -1;
else
{
svm_parameter subparam = (svm_parameter)param.clone();
subparam.probability=0;
subparam.C=1.0;
subparam.nr_weight=2;
subparam.weight_label = new int[2];
subparam.weight = new double[2];
subparam.weight_label[0]=+1;
subparam.weight_label[1]=-1;
subparam.weight[0]=Cp;
subparam.weight[1]=Cn;
svm_model submodel = svm_train(subprob,subparam);
for(j=begin;j<end;j++)
{
double[] dec_value=new double[1];
svm_predict_values(submodel,prob.x[perm[j]],dec_value);
dec_values[perm[j]]=dec_value[0];
					// ensure +1/-1 order; this is the reason the generic CV subroutine is not reused here
dec_values[perm[j]] *= submodel.label[0];
}
}
}
sigmoid_train(prob.l,dec_values,prob.y,probAB);
}
// Return parameter of a Laplace distribution
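	// Runs 5-fold cross validation with probability estimation switched off and uses the
	// mean absolute residual |y - y_hat| (recomputed after discarding residuals beyond
	// 5*sigma) as the scale of the Laplace noise model reported for epsilon/nu-SVR.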
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
int i;
int nr_fold = 5;
double[] ymv = new double[prob.l];
double mae = 0;
svm_parameter newparam = (svm_parameter)param.clone();
newparam.probability = 0;
svm_cross_validation(prob,newparam,nr_fold,ymv);
for(i=0;i<prob.l;i++)
{
ymv[i]=prob.y[i]-ymv[i];
mae += Math.abs(ymv[i]);
}
mae /= prob.l;
double std=Math.sqrt(2*mae*mae);
int count=0;
mae=0;
for(i=0;i<prob.l;i++)
if (Math.abs(ymv[i]) > 5*std)
count=count+1;
else
mae+=Math.abs(ymv[i]);
mae /= (prob.l-count);
svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
return mae;
}
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
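	// On return, perm reorders the training examples so that the examples of class k occupy
	// positions [start[k], start[k]+count[k]); svm_train and the stratified cross validation
	// below both rely on this contiguous grouping.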
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] data_label = new int[l];
int i;
for(i=0;i<l;i++)
{
int this_label = (int)(prob.y[i]);
int j;
for(j=0;j<nr_class;j++)
{
if(this_label == label[j])
{
++count[j];
break;
}
}
data_label[i] = j;
if(j == nr_class)
{
if(nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
System.arraycopy(label,0,new_data,0,label.length);
label = new_data;
new_data = new int[max_nr_class];
System.arraycopy(count,0,new_data,0,count.length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
//
// Labels are ordered by their first occurrence in the training set.
		// However, for two-class sets with -1/+1 labels where -1 appears first,
// we swap labels to ensure that internally the binary SVM has positive data corresponding to the +1 instances.
//
if (nr_class == 2 && label[0] == -1 && label[1] == +1)
{
do {int _=label[0]; label[0]=label[1]; label[1]=_;} while(false);
do {int _=count[0]; count[0]=count[1]; count[1]=_;} while(false);
for(i=0;i<l;i++)
{
if(data_label[i] == 0)
data_label[i] = 1;
else
data_label[i] = 0;
}
}
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
for(i=0;i<l;i++)
{
perm[start[data_label[i]]] = i;
++start[data_label[i]];
}
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
nr_class_ret[0] = nr_class;
label_ret[0] = label;
start_ret[0] = start;
count_ret[0] = count;
}
//
// Interface functions
//
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
svm_model model = new svm_model();
model.param = param;
if(param.svm_type == svm_parameter.ONE_CLASS ||
param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
// regression or one-class-svm
model.nr_class = 2;
model.label = null;
model.nSV = null;
model.probA = null; model.probB = null;
model.sv_coef = new double[1][];
if(param.probability == 1 &&
(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR))
{
model.probA = new double[1];
model.probA[0] = svm_svr_probability(prob,param);
}
decision_function f = svm_train_one(prob,param,0,0);
model.rho = new double[1];
model.rho[0] = f.rho;
int nSV = 0;
int i;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0) ++nSV;
model.l = nSV;
model.SV = new svm_node[nSV][];
model.sv_coef[0] = new double[nSV];
model.sv_indices = new int[nSV];
int j = 0;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0)
{
model.SV[j] = prob.x[i];
model.sv_coef[0][j] = f.alpha[i];
model.sv_indices[j] = i+1;
++j;
}
}
else
{
// classification
int l = prob.l;
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
int[] perm = new int[l];
// group training data of the same class
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
if(nr_class == 1)
svm.info("WARNING: training data in only one class. See README for details.\n");
svm_node[][] x = new svm_node[l][];
int i;
for(i=0;i<l;i++)
x[i] = prob.x[perm[i]];
// calculate weighted C
double[] weighted_C = new double[nr_class];
for(i=0;i<nr_class;i++)
weighted_C[i] = param.C;
for(i=0;i<param.nr_weight;i++)
{
int j;
for(j=0;j<nr_class;j++)
if(param.weight_label[i] == label[j])
break;
if(j == nr_class)
System.err.print("WARNING: class label "+param.weight_label[i]+" specified in weight is not found\n");
else
weighted_C[j] *= param.weight[i];
}
// train k*(k-1)/2 models
boolean[] nonzero = new boolean[l];
for(i=0;i<l;i++)
nonzero[i] = false;
decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];
double[] probA=null,probB=null;
if (param.probability == 1)
{
probA=new double[nr_class*(nr_class-1)/2];
probB=new double[nr_class*(nr_class-1)/2];
}
int p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
svm_problem sub_prob = new svm_problem();
int si = start[i], sj = start[j];
int ci = count[i], cj = count[j];
sub_prob.l = ci+cj;
sub_prob.x = new svm_node[sub_prob.l][];
sub_prob.y = new double[sub_prob.l];
int k;
for(k=0;k<ci;k++)
{
sub_prob.x[k] = x[si+k];
sub_prob.y[k] = +1;
}
for(k=0;k<cj;k++)
{
sub_prob.x[ci+k] = x[sj+k];
sub_prob.y[ci+k] = -1;
}
if(param.probability == 1)
{
double[] probAB=new double[2];
svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
probA[p]=probAB[0];
probB[p]=probAB[1];
}
f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
for(k=0;k<ci;k++)
if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
nonzero[si+k] = true;
for(k=0;k<cj;k++)
if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
nonzero[sj+k] = true;
++p;
}
// build output
model.nr_class = nr_class;
model.label = new int[nr_class];
for(i=0;i<nr_class;i++)
model.label[i] = label[i];
model.rho = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
model.rho[i] = f[i].rho;
if(param.probability == 1)
{
model.probA = new double[nr_class*(nr_class-1)/2];
model.probB = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
{
model.probA[i] = probA[i];
model.probB[i] = probB[i];
}
}
else
{
model.probA=null;
model.probB=null;
}
int nnz = 0;
int[] nz_count = new int[nr_class];
model.nSV = new int[nr_class];
for(i=0;i<nr_class;i++)
{
int nSV = 0;
for(int j=0;j<count[i];j++)
if(nonzero[start[i]+j])
{
++nSV;
++nnz;
}
model.nSV[i] = nSV;
nz_count[i] = nSV;
}
svm.info("Total nSV = "+nnz+"\n");
model.l = nnz;
model.SV = new svm_node[nnz][];
model.sv_indices = new int[nnz];
p = 0;
for(i=0;i<l;i++)
if(nonzero[i])
{
model.SV[p] = x[i];
model.sv_indices[p++] = perm[i] + 1;
}
int[] nz_start = new int[nr_class];
nz_start[0] = 0;
for(i=1;i<nr_class;i++)
nz_start[i] = nz_start[i-1]+nz_count[i-1];
model.sv_coef = new double[nr_class-1][];
for(i=0;i<nr_class-1;i++)
model.sv_coef[i] = new double[nnz];
p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
// classifier (i,j): coefficients with
// i are in sv_coef[j-1][nz_start[i]...],
// j are in sv_coef[i][nz_start[j]...]
int si = start[i];
int sj = start[j];
int ci = count[i];
int cj = count[j];
int q = nz_start[i];
int k;
for(k=0;k<ci;k++)
if(nonzero[si+k])
model.sv_coef[j-1][q++] = f[p].alpha[k];
q = nz_start[j];
for(k=0;k<cj;k++)
if(nonzero[sj+k])
model.sv_coef[i][q++] = f[p].alpha[ci+k];
++p;
}
}
return model;
}
// Stratified cross validation
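	// For C-SVC / nu-SVC each class is shuffled and distributed across the nr_fold folds so
	// every fold keeps roughly the original class proportions; for regression and one-class
	// problems (or when nr_fold >= l) a plain random split is used instead.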
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
int i;
int[] fold_start = new int[nr_fold+1];
int l = prob.l;
int[] perm = new int[l];
		// stratified cv may not give the leave-one-out rate:
		// splitting each class into l folds can leave some folds with zero elements
if((param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
{
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
// random shuffle and then data grouped by fold using the array perm
int[] fold_count = new int[nr_fold];
int c;
int[] index = new int[l];
for(i=0;i<l;i++)
index[i]=perm[i];
for (c=0; c<nr_class; c++)
for(i=0;i<count[c];i++)
{
int j = i+rand.nextInt(count[c]-i);
do {int _=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=_;} while(false);
}
for(i=0;i<nr_fold;i++)
{
fold_count[i] = 0;
for (c=0; c<nr_class;c++)
fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
}
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
for (c=0; c<nr_class;c++)
for(i=0;i<nr_fold;i++)
{
int begin = start[c]+i*count[c]/nr_fold;
int end = start[c]+(i+1)*count[c]/nr_fold;
for(int j=begin;j<end;j++)
{
perm[fold_start[i]] = index[j];
fold_start[i]++;
}
}
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
}
else
{
for(i=0;i<l;i++) perm[i]=i;
for(i=0;i<l;i++)
{
int j = i+rand.nextInt(l-i);
do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
}
for(i=0;i<=nr_fold;i++)
fold_start[i]=i*l/nr_fold;
}
for(i=0;i<nr_fold;i++)
{
int begin = fold_start[i];
int end = fold_start[i+1];
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
svm_model submodel = svm_train(subprob,param);
if(param.probability==1 &&
(param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC))
{
double[] prob_estimates= new double[svm_get_nr_class(submodel)];
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
}
else
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
}
}
public static int svm_get_svm_type(svm_model model)
{
return model.param.svm_type;
}
public static int svm_get_nr_class(svm_model model)
{
return model.nr_class;
}
public static void svm_get_labels(svm_model model, int[] label)
{
if (model.label != null)
for(int i=0;i<model.nr_class;i++)
label[i] = model.label[i];
}
public static void svm_get_sv_indices(svm_model model, int[] indices)
{
if (model.sv_indices != null)
for(int i=0;i<model.l;i++)
indices[i] = model.sv_indices[i];
}
public static int svm_get_nr_sv(svm_model model)
{
return model.l;
}
public static double svm_get_svr_probability(svm_model model)
{
if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
model.probA!=null)
return model.probA[0];
else
{
System.err.print("Model doesn't contain information for SVR probability inference\n");
return 0;
}
}
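	// Fills dec_values with the decision value(s) for x: a single value for
	// one-class/regression models, or nr_class*(nr_class-1)/2 pairwise values for
	// classification. Returns the predicted label (classification, by one-vs-one voting),
	// +1/-1 (one-class), or the regression estimate.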
public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
{
int i;
if(model.param.svm_type == svm_parameter.ONE_CLASS ||
model.param.svm_type == svm_parameter.EPSILON_SVR ||
model.param.svm_type == svm_parameter.NU_SVR)
{
double[] sv_coef = model.sv_coef[0];
double sum = 0;
for(i=0;i<model.l;i++)
sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
sum -= model.rho[0];
dec_values[0] = sum;
if(model.param.svm_type == svm_parameter.ONE_CLASS)
return (sum>0)?1:-1;
else
return sum;
}
else
{
int nr_class = model.nr_class;
int l = model.l;
double[] kvalue = new double[l];
for(i=0;i<l;i++)
kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+model.nSV[i-1];
int[] vote = new int[nr_class];
for(i=0;i<nr_class;i++)
vote[i] = 0;
int p=0;
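			// one-vs-one: evaluate every pairwise decision function and let each
			// winner cast a vote for its class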
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
double sum = 0;
int si = start[i];
int sj = start[j];
int ci = model.nSV[i];
int cj = model.nSV[j];
int k;
double[] coef1 = model.sv_coef[j-1];
double[] coef2 = model.sv_coef[i];
for(k=0;k<ci;k++)
sum += coef1[si+k] * kvalue[si+k];
for(k=0;k<cj;k++)
sum += coef2[sj+k] * kvalue[sj+k];
sum -= model.rho[p];
dec_values[p] = sum;
if(dec_values[p] > 0)
++vote[i];
else
++vote[j];
p++;
}
int vote_max_idx = 0;
for(i=1;i<nr_class;i++)
if(vote[i] > vote[vote_max_idx])
vote_max_idx = i;
return model.label[vote_max_idx];
}
}
public static double svm_predict(svm_model model, svm_node[] x)
{
int nr_class = model.nr_class;
double[] dec_values;
if(model.param.svm_type == svm_parameter.ONE_CLASS ||
model.param.svm_type == svm_parameter.EPSILON_SVR ||
model.param.svm_type == svm_parameter.NU_SVR)
dec_values = new double[1];
else
dec_values = new double[nr_class*(nr_class-1)/2];
double pred_result = svm_predict_values(model, x, dec_values);
return pred_result;
}
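	// For C-SVC/nu-SVC models trained with probability estimates, converts the pairwise
	// decision values into pairwise probabilities via the fitted sigmoids (probA/probB)
	// and couples them into per-class estimates; otherwise falls back to svm_predict.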
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
{
if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
model.probA!=null && model.probB!=null)
{
int i;
int nr_class = model.nr_class;
double[] dec_values = new double[nr_class*(nr_class-1)/2];
svm_predict_values(model, x, dec_values);
double min_prob=1e-7;
double[][] pairwise_prob=new double[nr_class][nr_class];
int k=0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
pairwise_prob[j][i]=1-pairwise_prob[i][j];
k++;
}
multiclass_probability(nr_class,pairwise_prob,prob_estimates);
int prob_max_idx = 0;
for(i=1;i<nr_class;i++)
if(prob_estimates[i] > prob_estimates[prob_max_idx])
prob_max_idx = i;
return model.label[prob_max_idx];
}
else
return svm_predict(model, x);
}
static final String svm_type_table[] =
{
"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
};
static final String kernel_type_table[]=
{
"linear","polynomial","rbf","sigmoid","precomputed"
};
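	// Writes the model as plain text: a header of "key value" lines (svm_type, kernel_type,
	// rho, label, probA/probB, nr_sv, ...) followed by an "SV" section with one support
	// vector per line (its coefficients, then index:value pairs).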
public static void svm_save_model(String model_file_name, svm_model model) throws IOException
{
DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(model_file_name)));
svm_parameter param = model.param;
fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
if(param.kernel_type == svm_parameter.POLY)
fp.writeBytes("degree "+param.degree+"\n");
if(param.kernel_type == svm_parameter.POLY ||
param.kernel_type == svm_parameter.RBF ||
param.kernel_type == svm_parameter.SIGMOID)
fp.writeBytes("gamma "+param.gamma+"\n");
if(param.kernel_type == svm_parameter.POLY ||
param.kernel_type == svm_parameter.SIGMOID)
fp.writeBytes("coef0 "+param.coef0+"\n");
int nr_class = model.nr_class;
int l = model.l;
fp.writeBytes("nr_class "+nr_class+"\n");
fp.writeBytes("total_sv "+l+"\n");
{
fp.writeBytes("rho");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.rho[i]);
fp.writeBytes("\n");
}
if(model.label != null)
{
fp.writeBytes("label");
for(int i=0;i<nr_class;i++)
fp.writeBytes(" "+model.label[i]);
fp.writeBytes("\n");
}
if(model.probA != null) // regression has probA only
{
fp.writeBytes("probA");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.probA[i]);
fp.writeBytes("\n");
}
if(model.probB != null)
{
fp.writeBytes("probB");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.probB[i]);
fp.writeBytes("\n");
}
if(model.nSV != null)
{
fp.writeBytes("nr_sv");
for(int i=0;i<nr_class;i++)
fp.writeBytes(" "+model.nSV[i]);
fp.writeBytes("\n");
}
fp.writeBytes("SV\n");
double[][] sv_coef = model.sv_coef;
svm_node[][] SV = model.SV;
for(int i=0;i<l;i++)
{
for(int j=0;j<nr_class-1;j++)
fp.writeBytes(sv_coef[j][i]+" ");
svm_node[] p = SV[i];
if(param.kernel_type == svm_parameter.PRECOMPUTED)
fp.writeBytes("0:"+(int)(p[0].value));
else
for(int j=0;j<p.length;j++)
fp.writeBytes(p[j].index+":"+p[j].value+" ");
fp.writeBytes("\n");
}
fp.close();
}
private static double atof(String s)
{
		return Double.parseDouble(s);
}
private static int atoi(String s)
{
return Integer.parseInt(s);
}
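	// Parses the header of a model file into model.param and the model fields,
	// stopping at the "SV" line; returns false on an unknown key or a read error.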
private static boolean read_model_header(BufferedReader fp, svm_model model)
{
svm_parameter param = new svm_parameter();
model.param = param;
try
{
while(true)
{
String cmd = fp.readLine();
String arg = cmd.substring(cmd.indexOf(' ')+1);
if(cmd.startsWith("svm_type"))
{
int i;
for(i=0;i<svm_type_table.length;i++)
{
if(arg.indexOf(svm_type_table[i])!=-1)
{
param.svm_type=i;
break;
}
}
if(i == svm_type_table.length)
{
System.err.print("unknown svm type.\n");
return false;
}
}
else if(cmd.startsWith("kernel_type"))
{
int i;
for(i=0;i<kernel_type_table.length;i++)
{
if(arg.indexOf(kernel_type_table[i])!=-1)
{
param.kernel_type=i;
break;
}
}
if(i == kernel_type_table.length)
{
System.err.print("unknown kernel function.\n");
return false;
}
}
else if(cmd.startsWith("degree"))
param.degree = atoi(arg);
else if(cmd.startsWith("gamma"))
param.gamma = atof(arg);
else if(cmd.startsWith("coef0"))
param.coef0 = atof(arg);
else if(cmd.startsWith("nr_class"))
model.nr_class = atoi(arg);
else if(cmd.startsWith("total_sv"))
model.l = atoi(arg);
else if(cmd.startsWith("rho"))
{
int n = model.nr_class * (model.nr_class-1)/2;
model.rho = new double[n];
StringTokenizer st = new StringTokenizer(arg);
for(int i=0;i<n;i++)
model.rho[i] = atof(st.nextToken());
}
else if(cmd.startsWith("label"))
{
int n = model.nr_class;
model.label = new int[n];
StringTokenizer st = new StringTokenizer(arg);
for(int i=0;i<n;i++)
model.label[i] = atoi(st.nextToken());
}
else if(cmd.startsWith("probA"))
{
int n = model.nr_class*(model.nr_class-1)/2;
model.probA = new double[n];
StringTokenizer st = new StringTokenizer(arg);
for(int i=0;i<n;i++)
model.probA[i] = atof(st.nextToken());
}
else if(cmd.startsWith("probB"))
{
int n = model.nr_class*(model.nr_class-1)/2;
model.probB = new double[n];
StringTokenizer st = new StringTokenizer(arg);
for(int i=0;i<n;i++)
model.probB[i] = atof(st.nextToken());
}
else if(cmd.startsWith("nr_sv"))
{
int n = model.nr_class;
model.nSV = new int[n];
StringTokenizer st = new StringTokenizer(arg);
for(int i=0;i<n;i++)
model.nSV[i] = atoi(st.nextToken());
}
else if(cmd.startsWith("SV"))
{
break;
}
else
{
System.err.print("unknown text in model file: ["+cmd+"]\n");
return false;
}
}
}
catch(Exception e)
{
return false;
}
return true;
}
public static svm_model svm_load_model(String model_file_name) throws IOException
{
return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
}
public static svm_model svm_load_model(BufferedReader fp) throws IOException
{
// read parameters
svm_model model = new svm_model();
model.rho = null;
model.probA = null;
model.probB = null;
model.label = null;
model.nSV = null;
if (read_model_header(fp, model) == false)
{
System.err.print("ERROR: failed to read model\n");
return null;
}
// read sv_coef and SV
int m = model.nr_class - 1;
int l = model.l;
model.sv_coef = new double[m][l];
model.SV = new svm_node[l][];
for(int i=0;i<l;i++)
{
String line = fp.readLine();
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
for(int k=0;k<m;k++)
model.sv_coef[k][i] = atof(st.nextToken());
int n = st.countTokens()/2;
model.SV[i] = new svm_node[n];
for(int j=0;j<n;j++)
{
model.SV[i][j] = new svm_node();
model.SV[i][j].index = atoi(st.nextToken());
model.SV[i][j].value = atof(st.nextToken());
}
}
fp.close();
return model;
}
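	// Returns null if the parameters are usable for the given problem, otherwise a short
	// error message. For nu-SVC it also checks that nu is feasible for the class counts.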
public static String svm_check_parameter(svm_problem prob, svm_parameter param)
{
// svm_type
int svm_type = param.svm_type;
if(svm_type != svm_parameter.C_SVC &&
svm_type != svm_parameter.NU_SVC &&
svm_type != svm_parameter.ONE_CLASS &&
svm_type != svm_parameter.EPSILON_SVR &&
svm_type != svm_parameter.NU_SVR)
return "unknown svm type";
// kernel_type, degree
int kernel_type = param.kernel_type;
if(kernel_type != svm_parameter.LINEAR &&
kernel_type != svm_parameter.POLY &&
kernel_type != svm_parameter.RBF &&
kernel_type != svm_parameter.SIGMOID &&
kernel_type != svm_parameter.PRECOMPUTED)
return "unknown kernel type";
if(param.gamma < 0)
return "gamma < 0";
if(param.degree < 0)
return "degree of polynomial kernel < 0";
// cache_size,eps,C,nu,p,shrinking
if(param.cache_size <= 0)
return "cache_size <= 0";
if(param.eps <= 0)
return "eps <= 0";
if(svm_type == svm_parameter.C_SVC ||
svm_type == svm_parameter.EPSILON_SVR ||
svm_type == svm_parameter.NU_SVR)
if(param.C <= 0)
return "C <= 0";
if(svm_type == svm_parameter.NU_SVC ||
svm_type == svm_parameter.ONE_CLASS ||
svm_type == svm_parameter.NU_SVR)
if(param.nu <= 0 || param.nu > 1)
return "nu <= 0 or nu > 1";
if(svm_type == svm_parameter.EPSILON_SVR)
if(param.p < 0)
return "p < 0";
if(param.shrinking != 0 &&
param.shrinking != 1)
return "shrinking != 0 and shrinking != 1";
if(param.probability != 0 &&
param.probability != 1)
return "probability != 0 and probability != 1";
if(param.probability == 1 &&
svm_type == svm_parameter.ONE_CLASS)
return "one-class SVM probability output not supported yet";
// check whether nu-svc is feasible
if(svm_type == svm_parameter.NU_SVC)
{
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int i;
for(i=0;i<l;i++)
{
int this_label = (int)prob.y[i];
int j;
for(j=0;j<nr_class;j++)
if(this_label == label[j])
{
++count[j];
break;
}
if(j == nr_class)
{
if(nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
System.arraycopy(label,0,new_data,0,label.length);
label = new_data;
new_data = new int[max_nr_class];
System.arraycopy(count,0,new_data,0,count.length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
for(i=0;i<nr_class;i++)
{
int n1 = count[i];
for(int j=i+1;j<nr_class;j++)
{
int n2 = count[j];
if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
return "specified nu is infeasible";
}
}
}
return null;
}
public static int svm_check_probability_model(svm_model model)
{
if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
model.probA!=null && model.probB!=null) ||
((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
model.probA!=null))
return 1;
else
return 0;
}
public static void svm_set_print_string_function(svm_print_interface print_func)
{
if (print_func == null)
svm_print_string = svm_print_stdout;
else
svm_print_string = print_func;
}
}
| 63,810 | 21.429174 | 145 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm_problem.java | package toolbox.svm.libsvm;
public class svm_problem implements java.io.Serializable
{
public int l;
public double[] y;
public svm_node[][] x;
}
| 148 | 17.625 | 56 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm_print_interface.java | package toolbox.svm.libsvm;
public interface svm_print_interface
{
public void print(String s);
}
| 99 | 15.666667 | 36 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm_model.java | //
// svm_model
//
package toolbox.svm.libsvm;
public class svm_model implements java.io.Serializable
{
public svm_parameter param; // parameter
public int nr_class; // number of classes, = 2 in regression/one class svm
public int l; // total #SV
public svm_node[][] SV; // SVs (SV[l])
public double[][] sv_coef; // coefficients for SVs in decision functions (sv_coef[k-1][l])
public double[] rho; // constants in decision functions (rho[k*(k-1)/2])
public double[] probA; // pariwise probability information
public double[] probB;
public int[] sv_indices; // sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set
// for classification only
public int[] label; // label of each class (label[k])
public int[] nSV; // number of SVs for each class (nSV[k])
// nSV[0] + nSV[1] + ... + nSV[k-1] = l
};
| 880 | 37.304348 | 133 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm_parameter.java | package toolbox.svm.libsvm;
public class svm_parameter implements Cloneable,java.io.Serializable
{
/* svm_type */
public static final int C_SVC = 0;
public static final int NU_SVC = 1;
public static final int ONE_CLASS = 2;
public static final int EPSILON_SVR = 3;
public static final int NU_SVR = 4;
/* kernel_type */
public static final int LINEAR = 0;
public static final int POLY = 1;
public static final int RBF = 2;
public static final int SIGMOID = 3;
public static final int PRECOMPUTED = 4;
public int svm_type;
public int kernel_type;
public int degree; // for poly
public double gamma; // for poly/rbf/sigmoid
public double coef0; // for poly/sigmoid
// these are for training only
public double cache_size; // in MB
public double eps; // stopping criteria
public double C; // for C_SVC, EPSILON_SVR and NU_SVR
public int nr_weight; // for C_SVC
public int[] weight_label; // for C_SVC
public double[] weight; // for C_SVC
public double nu; // for NU_SVC, ONE_CLASS, and NU_SVR
public double p; // for EPSILON_SVR
public int shrinking; // use the shrinking heuristics
public int probability; // do probability estimates
public Object clone()
{
try
{
return super.clone();
} catch (CloneNotSupportedException e)
{
return null;
}
}
}
| 1,300 | 26.104167 | 68 | java |
tnetwork | tnetwork-master/tnetwork/DCD/externals/DYNAMO/dynamo-master/src/main/java/toolbox/svm/libsvm/svm_node.java | package toolbox.svm.libsvm;
public class svm_node implements java.io.Serializable
{
public int index;
public double value;
}
| 127 | 17.285714 | 53 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/StochasticMetaAscent.java | package cc.mallet.optimize;
import java.util.Arrays;
import java.util.logging.Logger;
import java.text.DecimalFormat;
import cc.mallet.optimize.Optimizer;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
/**
* @author Greg Druck
* @author Kedar Bellare
*/
public class StochasticMetaAscent implements Optimizer.ByBatches {
private static Logger logger =
MalletLogger.getLogger(StochasticMetaAscent.class.getName());
private final int MAX_ITER = 200;
private final double LAMBDA = 1.0;
private final double TOLERANCE = 0.01;
private final double EPS = 1e-10;
private double mu = 0.1;
private int totalIterations = 0;
private double eta_init = 0.03;
private boolean useHessian = true;
private double[] gain;
private double[] gradientTrace;
Optimizable.ByBatchGradient maxable = null;
public StochasticMetaAscent (Optimizable.ByBatchGradient maxable) {
this.maxable = maxable;
}
public void setInitialStep(double step) {
eta_init = step;
}
public void setMu(double m) {
mu = m;
}
public void setUseHessian(boolean flag) {
useHessian = flag;
}
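  // Stochastic meta-ascent keeps a separate step size ("gain") per parameter. After each
  // batch the gradient is sign-flipped (so the descent-style update performs ascent),
  // each gain is rescaled by max(0.5, 1 - mu * gradient * gradientTrace), where
  // gradientTrace is a decayed history of recent steps (optionally refined with a
  // finite-difference Hessian-vector product), and each parameter takes a step of
  // -gain * gradient.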
public boolean optimize(int numBatches, int[] batchAssignments) {
return optimize(MAX_ITER,numBatches,batchAssignments);
}
public boolean optimize(int numIterations, int numBatches, int[] batchAssignments) {
int numParameters = maxable.getNumParameters();
double[] parameters = new double[numParameters];
double[] gradient = new double[numParameters];
double[] hessianProduct = new double[numParameters];
// Only initialize these if they are null
// in case someone wants to optimize a
// few iterations at a time.
if (gain == null) {
System.err.println ("StochasticMetaAscent: initialStep="+eta_init+" metaStep="+mu);
gain = new double[numParameters];
Arrays.fill(gain, eta_init);
gradientTrace = new double[numParameters];
}
maxable.getParameters(parameters);
for (int iteration = 0; iteration < numIterations; iteration++) {
double oldApproxValue = 0;
double approxValue = 0;
for (int batch = 0; batch < numBatches; batch++) {
logger.info("Iteration " + (totalIterations + iteration) + ", batch " + batch + " of " + numBatches);
// Get current parameters
maxable.getParameters(parameters);
// Update value and gradient for the current batch
double initialValue = maxable.getBatchValue (batch, batchAssignments);
oldApproxValue += initialValue;
if (Double.isNaN (initialValue)) {
throw new IllegalArgumentException ("NaN in value computation. Probably you need to reduce initialStep or metaStep.");
}
maxable.getBatchValueGradient(gradient,batch,batchAssignments);
// The code below was originally written for stochastic meta
// descent. We are maximizing, so we want ascent. Flip the
// signs on the gradient to make it point downhill.
MatrixOps.timesEquals(gradient, -1);
if (useHessian) {
computeHessianProduct(maxable, parameters, batch, batchAssignments, gradient, gradientTrace, hessianProduct);
}
reportOnVec ("x", parameters);
reportOnVec ("step", gain);
reportOnVec ("grad", gradient);
reportOnVec ("trace", gradientTrace);
// Update learning rates for individual parameters
for (int index = 0; index < numParameters; index++) {
// for the first iteration, this will just be the initial step
// since gradientTrace will be all zeros
gain[index] *= Math.max(0.5, 1 - mu * gradient[index] * gradientTrace[index]);
// adjust parameters based on direction
parameters[index] -= gain[index] * gradient[index];
if (useHessian) {
// adjust gradient trace
gradientTrace[index] = LAMBDA * gradientTrace[index] - gain[index] *
(gradient[index] + LAMBDA * hessianProduct[index]);
}
else {
// adjust gradient trace
gradientTrace[index] = LAMBDA * gradientTrace[index] - gain[index] *
(gradient[index] + LAMBDA * gradientTrace[index]);
}
}
// Set new parameters
maxable.setParameters(parameters);
double finalValue = maxable.getBatchValue (batch, batchAssignments);
approxValue += finalValue;
logger.info ("StochasticMetaAscent: initial value: "+initialValue+" final value:"+finalValue);
}
logger.info("StochasticMetaDescent: Value at iteration (" + (totalIterations + iteration) + ")= " + approxValue);
// converge criteria from GradientAscent and LimitedMemoryBFGS
if (2.0*Math.abs(approxValue-oldApproxValue) <=
TOLERANCE*(Math.abs(approxValue)+Math.abs(oldApproxValue)+EPS)) {
logger.info ("Stochastic Meta Ascent: Value difference "
+Math.abs(approxValue-oldApproxValue)
+" below " + "tolerance; saying converged.");
totalIterations += iteration;
return true;
}
oldApproxValue = approxValue;
}
totalIterations += numIterations;
return false;
}
private void reportOnVec (String s, double[] v)
{
DecimalFormat f = new DecimalFormat ("0.####");
System.out.println ("StochasticMetaAscent: "+s+":"+
" min "+ f.format(MatrixOps.min (v)) +
" max "+ f.format(MatrixOps.max (v)) +
" mean "+ f.format(MatrixOps.mean (v)) +
" 2norm "+ f.format(MatrixOps.twoNorm (v)) +
" abs-norm "+ f.format(MatrixOps.absNorm (v))
);
}
// compute finite difference approximation of the Hessian product
private void computeHessianProduct(Optimizable.ByBatchGradient maxable,
double[] parameters, int batchIndex, int[] batchAssignments,
double[] currentGradient, double[] vector, double[] result) {
int numParameters = maxable.getNumParameters();
double eps = 1.0e-6;
double[] epsGradient = new double[numParameters];
double[] oldParameters = new double[numParameters];
// adjust parameters by (eps * vector) and recompute gradient
System.arraycopy(parameters,0,oldParameters,0,numParameters);
MatrixOps.plusEquals(parameters, vector, eps);
maxable.setParameters(parameters);
maxable.getBatchValueGradient(epsGradient, batchIndex, batchAssignments);
// restore old parameters
maxable.setParameters(oldParameters);
// calculate Hessian product
for (int index = 0; index < result.length; index++) {
result[index] = (-epsGradient[index] - currentGradient[index]) / eps;
}
}
}
| 6,663 | 33.174359 | 129 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/GradientAscent.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
import java.util.logging.*;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
public class GradientAscent implements Optimizer
{
private static Logger logger = MalletLogger.getLogger(GradientAscent.class.getName());
boolean converged = false;
Optimizable.ByGradientValue optimizable;
private double maxStep = 1.0;
private OptimizerEvaluator.ByGradient eval;
static final double initialStepSize = 0.2;
double tolerance = 0.001;
int maxIterations = 200;
LineOptimizer.ByGradient lineMaximizer;
double stpmax = 100;
// "eps" is a small number to rectify the special case of converging
// to exactly zero function value
final double eps = 1.0e-10;
double step = initialStepSize;
public GradientAscent (Optimizable.ByGradientValue function)
{
optimizable = function;
lineMaximizer = new BackTrackLineSearch(function);
}
public Optimizable getOptimizable () { return this.optimizable; }
public boolean isConverged () { return converged; }
public LineOptimizer.ByGradient getLineMaximizer ()
{
return lineMaximizer;
}
/* Tricky: this is now set at GradientAscent construction time. How to set it later?
* What to pass as an argument here? The lineMaximizer needs the function at the time of its construction!
public void setLineMaximizer (LineOptimizer.ByGradient lineMaximizer)
{
this.lineMaximizer = lineMaximizer;
}*/
public double getInitialStepSize ()
{
return initialStepSize;
}
public void setInitialStepSize (double initialStepSize)
{
step = initialStepSize;
}
public double getStpmax ()
{
return stpmax;
}
public void setStpmax (double stpmax)
{
this.stpmax = stpmax;
}
public boolean optimize ()
{
return optimize (maxIterations);
}
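	// Each iteration takes the current gradient, rescales it if its 2-norm exceeds stpmax,
	// line-searches along it, and declares convergence once the relative change in value
	// falls below the tolerance.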
public boolean optimize (int numIterations)
{
int iterations;
double fret;
double fp = optimizable.getValue ();
double[] xi = new double [optimizable.getNumParameters()];
optimizable.getValueGradient(xi);
for (iterations = 0; iterations < numIterations; iterations++) {
logger.info ("At iteration "+iterations+", cost = "+fp+", scaled = "+maxStep+" step = "+step+", gradient infty-norm = "+MatrixOps.infinityNorm (xi));
// Ensure step not too large
double sum = MatrixOps.twoNorm (xi);
if (sum > stpmax) {
logger.info ("*** Step 2-norm "+sum+" greater than max "+stpmax+" Scaling...");
MatrixOps.timesEquals (xi,stpmax/sum);
}
step = lineMaximizer.optimize (xi, step);
fret = optimizable.getValue ();
if (2.0*Math.abs(fret-fp) <= tolerance*(Math.abs(fret)+Math.abs(fp)+eps)) {
logger.info ("Gradient Ascent: Value difference "+Math.abs(fret-fp)+" below " +
"tolerance; saying converged.");
converged = true;
return true;
}
fp = fret;
optimizable.getValueGradient(xi);
if (eval != null) {
eval.evaluate (optimizable, iterations);
}
}
return false;
}
public void setMaxStepSize (double v)
{
maxStep = v;
}
public void setEvaluator (OptimizerEvaluator.ByGradient eval)
{
this.eval = eval;
}
}
| 3,688 | 26.325926 | 152 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/LineOptimizer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
Maximize a function projected along a line.
*/
package cc.mallet.optimize;
/** Optimize, constrained to move parameters along the direction of a specified line.
* The Optimizable object would be either Optimizable.ByValue or Optimizable.ByGradient. */
public interface LineOptimizer
{
/** Returns the last step size used. */
public double optimize (double[] line, double initialStep);
public interface ByGradient {
/** Returns the last step size used. */
public double optimize (double[] line, double initialStep);
}
}
| 1,057 | 31.060606 | 91 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/InvalidOptimizableException.java | /* Copyright (C) 2006 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://mallet.cs.umass.edu/
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.optimize;
/**
* Exception thrown by optimization algorithms, when the problem is usually
* due to a problem with the given Maximizable instance.
* <p>
* If the optimizer throws this in your code, usually there are two possible
* causes: (a) you are computing the gradients approximately, (b) your value
* function and gradient do not match (this can be checking using
* @link{edu.umass.cs.mallet.base.maximize.tests.TestMaximizable}.
*
* Created: Feb 1, 2006
*
* @author <A HREF="mailto:[email protected]>[email protected]</A>
* @version $Id: InvalidMaximizableException.java,v 1.1 2007/10/22 21:37:39 mccallum Exp $
*/
public class InvalidOptimizableException extends OptimizationException {
public InvalidOptimizableException ()
{
}
public InvalidOptimizableException (String message)
{
super (message);
}
public InvalidOptimizableException (String message, Throwable cause)
{
super (message, cause);
}
public InvalidOptimizableException (Throwable cause)
{
super (cause);
}
}
| 1,467 | 32.363636 | 90 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/BackTrackLineSearch.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Aron Culotta <a href="mailto:[email protected]">[email protected]</a>
*/
/**
Numerical Recipes in C: p.385. lnsrch. A simple backtracking line
search. No attempt at accurately finding the true minimum is
made. The goal is only to ensure that BackTrackLineSearch will
return a position of higher value.
*/
package cc.mallet.optimize;
import java.util.logging.*;
import java.util.Arrays;
import cc.mallet.optimize.LineOptimizer;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.MatrixOps;
//"Line Searches and Backtracking", p385, "Numeric Recipes in C"
public class BackTrackLineSearch implements LineOptimizer.ByGradient
{
private static Logger logger = Logger.getLogger(BackTrackLineSearch.class.getName());
Optimizable.ByGradientValue function;
public BackTrackLineSearch (Optimizable.ByGradientValue optimizable) {
this.function = optimizable;
}
final int maxIterations = 100;
final double stpmax = 100;
final double EPS = 3.0e-12;
// termination conditions: either
// a) abs(delta x/x) < REL_TOLX for all coordinates
// b) abs(delta x) < ABS_TOLX for all coordinates
// c) sufficient function increase (uses ALF)
private double relTolx = 1e-7;
private double absTolx = 1e-4; // tolerance on absolute value difference
final double ALF = 1e-4;
/**
* Sets the tolerance of relative diff in function value.
* Line search converges if <tt>abs(delta x / x) < tolx</tt>
* for all coordinates. */
public void setRelTolx (double tolx) { relTolx = tolx; }
/**
* Sets the tolerance of absolute diff in function value.
* Line search converges if <tt>abs(delta x) < tolx</tt>
* for all coordinates. */
public void setAbsTolx (double tolx) { absTolx = tolx; }
// initialStep is ignored. This is b/c if the initial step is not 1.0,
// it sometimes confuses the backtracking for reasons I don't
// understand. (That is, the jump gets LARGER on iteration 1.)
// returns fraction of step size (alam) if found a good step
// returns 0.0 if could not step in direction
public double optimize (double[] line, double initialStep)
{
double[] g, x, oldParameters;
double slope, newSlope, temp, test, alamin, alam, alam2, tmplam;
double rhs1, rhs2, a, b, disc, oldAlam;
double f, fold, f2;
g = new double[function.getNumParameters()]; // gradient
x = new double[function.getNumParameters()]; // parameters
oldParameters = new double[function.getNumParameters()];
function.getParameters (x);
System.arraycopy (x, 0, oldParameters, 0, x.length);
function.getValueGradient (g);
alam2 = tmplam = 0.0;
f2 = fold = function.getValue();
if (logger.isLoggable(Level.FINE)) {
logger.fine ("ENTERING BACKTRACK\n");
logger.fine("Entering BackTrackLnSrch, value="+fold+",\ndirection.oneNorm:"
+ MatrixOps.oneNorm(line) + " direction.infNorm:"+MatrixOps.infinityNorm(line));
}
assert (!MatrixOps.isNaN(g));
double sum = MatrixOps.twoNorm(line);
if(sum > stpmax) {
logger.warning("attempted step too big. scaling: sum="+sum+
", stpmax="+stpmax);
MatrixOps.timesEquals(line, stpmax/sum);
}
newSlope = slope = MatrixOps.dotProduct (g, line);
logger.fine("slope="+slope);
if (slope<0)
throw new InvalidOptimizableException ("Slope = " + slope + " is negative");
if (slope == 0)
throw new InvalidOptimizableException ("Slope = " + slope + " is zero");
// find maximum lambda
// converge when (delta x) / x < REL_TOLX for all coordinates.
// the largest step size that triggers this threshold is
// precomputed and saved in alamin
test = 0.0;
for(int i=0; i<oldParameters.length; i++) {
temp = Math.abs(line[i]) /
Math.max(Math.abs(oldParameters[i]), 1.0);
if(temp > test) test = temp;
}
alamin = relTolx/test;
alam = 1.0;
oldAlam = 0.0;
int iteration = 0;
// look for step size in direction given by "line"
for(iteration=0; iteration < maxIterations; iteration++) {
// x = oldParameters + alam*line
// initially, alam = 1.0, i.e. take full Newton step
logger.fine("BackTrack loop iteration "+iteration+": alam="+
alam+" oldAlam="+oldAlam);
logger.fine ("before step, x.1norm: " + MatrixOps.oneNorm(x) +
"\nalam: " + alam + "\noldAlam: " + oldAlam);
assert(alam != oldAlam) : "alam == oldAlam";
MatrixOps.plusEquals(x, line, alam - oldAlam); // step
logger.fine ("after step, x.1norm: " + MatrixOps.oneNorm(x));
// check for convergence
//convergence on delta x
if ((alam < alamin) || smallAbsDiff (oldParameters, x)) {
// if ((alam < alamin)) {
function.setParameters(oldParameters);
f = function.getValue();
logger.warning("EXITING BACKTRACK: Jump too small (alamin="+alamin+"). Exiting and using xold. Value="+f);
return 0.0;
}
function.setParameters(x);
oldAlam = alam;
f = function.getValue();
logger.fine("value="+f);
			// sufficient function increase (Wolfe condition)
if(f >= fold+ALF*alam*slope) {
logger.fine("EXITING BACKTRACK: value="+f);
if (f<fold)
throw new IllegalStateException
("Function did not increase: f=" + f +
" < " + fold + "=fold");
return alam;
}
// if value is infinite, i.e. we've
// jumped to unstable territory, then scale down jump
else if(Double.isInfinite(f) || Double.isInfinite(f2)) {
logger.warning ("Value is infinite after jump " + oldAlam + ". f="+f+", f2="+f2+". Scaling back step size...");
tmplam = .2 * alam;
if(alam < alamin) { //convergence on delta x
function.setParameters(oldParameters);
f = function.getValue();
logger.warning("EXITING BACKTRACK: Jump too small. Exiting and using xold. Value="+f);
return 0.0;
}
}
else { // backtrack
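				// shrink the step: the first backtrack uses a quadratic model of the
				// function along the line, later backtracks use the cubic model from
				// Numerical Recipes' lnsrch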
if(alam == 1.0) // first time through
tmplam = -slope/(2.0*(f-fold-slope));
else {
rhs1 = f-fold-alam*slope;
rhs2 = f2-fold-alam2*slope;
assert((alam - alam2) != 0): "FAILURE: dividing by alam-alam2. alam="+alam;
a = (rhs1/(alam*alam)-rhs2/(alam2*alam2))/(alam-alam2);
b = (-alam2*rhs1/(alam*alam)+alam*rhs2/(alam2*alam2))/(alam-alam2);
if(a == 0.0)
tmplam = -slope/(2.0*b);
else {
disc = b*b-3.0*a*slope;
if(disc < 0.0) {
tmplam = .5 * alam;
}
else if (b <= 0.0)
tmplam = (-b+Math.sqrt(disc))/(3.0*a);
else tmplam = -slope/(b+Math.sqrt(disc));
}
if (tmplam > .5*alam)
tmplam = .5*alam; // lambda <= .5 lambda_1
}
}
alam2 = alam;
f2 = f;
logger.fine("tmplam:"+tmplam);
alam = Math.max(tmplam, .1*alam); // lambda >= .1*Lambda_1
}
if(iteration >= maxIterations)
throw new IllegalStateException ("Too many iterations.");
return 0.0;
}
// returns true iff we've converged based on absolute x difference
private boolean smallAbsDiff (double[] x, double[] xold)
{
for (int i = 0; i < x.length; i++) {
if (Math.abs (x[i] - xold[i]) > absTolx) {
return false;
}
}
return true;
}
}
| 7,473 | 33.127854 | 115 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/ConjugateGradient.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
import java.util.logging.*;
import cc.mallet.optimize.LineOptimizer;
import cc.mallet.optimize.Optimizable;
import cc.mallet.optimize.tests.TestOptimizable;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
// Conjugate Gradient, Polak and Ribiere version
// from "Numeric Recipes in C", Section 10.6.
public class ConjugateGradient implements Optimizer
{
private static Logger logger = MalletLogger.getLogger(ConjugateGradient.class.getName());
boolean converged = false;
Optimizable.ByGradientValue optimizable;
LineOptimizer.ByGradient lineMaximizer;
// xxx If this is too big, we can get inconsistent value and gradient in MaxEntTrainer
// Investigate!!!
double initialStepSize = 0.01;
double tolerance = 0.0001;
int maxIterations = 1000;
// "eps" is a small number to recitify the special case of converging
// to exactly zero function value
final double eps = 1.0e-10;
private OptimizerEvaluator.ByGradient eval;
public ConjugateGradient (Optimizable.ByGradientValue function, double initialStepSize)
{
this.initialStepSize = initialStepSize;
this.optimizable = function;
this.lineMaximizer = new BackTrackLineSearch (function);
// Alternative: = new GradientBracketLineMaximizer (function);
}
public ConjugateGradient (Optimizable.ByGradientValue function)
{
this (function, 0.01);
}
public Optimizable getOptimizable () { return this.optimizable; }
public boolean isConverged () { return converged; }
public void setEvaluator (OptimizerEvaluator.ByGradient eval)
{
this.eval = eval;
}
public void setLineMaximizer (LineOptimizer.ByGradient lineMaximizer)
{
this.lineMaximizer = lineMaximizer;
}
public void setInitialStepSize (double initialStepSize) { this.initialStepSize = initialStepSize; }
public double getInitialStepSize () { return this.initialStepSize; }
public double getStepSize () { return step; }
// The state of a conjugate gradient search
double fp, gg, gam, dgg, step, fret;
double[] xi, g, h;
int j, iterations;
public boolean optimize ()
{
return optimize (maxIterations);
}
public void setTolerance(double t) {
tolerance = t;
}
public boolean optimize (int numIterations)
{
if (converged)
return true;
int n = optimizable.getNumParameters();
double prevStepSize = initialStepSize;
boolean searchingGradient = true;
if (xi == null) {
fp = optimizable.getValue ();
xi = new double[n];
g = new double[n];
h = new double[n];
optimizable.getValueGradient (xi);
System.arraycopy (xi, 0, g, 0, n);
System.arraycopy (xi, 0, h, 0, n);
step = initialStepSize;
iterations = 0;
}
for (int iterationCount = 0; iterationCount < numIterations; iterationCount++) {
logger.info ("ConjugateGradient: At iteration "+iterations+", cost = "+fp);
try {
prevStepSize = step;
step = lineMaximizer.optimize (xi, step);
} catch (IllegalArgumentException e) {
System.out.println ("ConjugateGradient caught "+e.toString());
TestOptimizable.testValueAndGradientCurrentParameters(optimizable);
TestOptimizable.testValueAndGradientInDirection(optimizable, xi);
//System.out.println ("Trying ConjugateGradient restart.");
//return this.maximize (maxable, numIterations);
}
if (step == 0) {
if (searchingGradient) {
System.err.println ("ConjugateGradient converged: Line maximizer got step 0 in gradient direction. "
+"Gradient absNorm="+MatrixOps.absNorm(xi));
converged = true;
return true;
} else
System.err.println ("Line maximizer got step 0. Probably pointing up hill. Resetting to gradient. "
+"Gradient absNorm="+MatrixOps.absNorm(xi));
// Copied from above (how to code this better? I want GoTo)
fp = optimizable.getValue();
optimizable.getValueGradient (xi);
searchingGradient = true;
System.arraycopy (xi, 0, g, 0, n);
System.arraycopy (xi, 0, h, 0, n);
step = prevStepSize;
continue;
}
fret = optimizable.getValue();
// This termination provided by "Numeric Recipes in C".
if (2.0*Math.abs(fret-fp) <= tolerance*(Math.abs(fret)+Math.abs(fp)+eps)) {
System.out.println ("ConjugateGradient converged: old value= "+fp+" new value= "+fret+" tolerance="+tolerance);
converged = true;
return true;
}
fp = fret;
optimizable.getValueGradient(xi);
logger.info ("Gradient infinityNorm = "+MatrixOps.infinityNorm(xi));
// This termination provided by McCallum
if (MatrixOps.infinityNorm(xi) < tolerance) {
System.err.println ("ConjugateGradient converged: maximum gradient component "+MatrixOps.infinityNorm(xi)
+", less than "+tolerance);
converged = true;
return true;
}
dgg = gg = 0.0;
double gj, xj;
for (j = 0; j < xi.length; j++) {
gj = g[j];
gg += gj * gj;
xj = -xi[j];
				dgg += (xj + gj) * xj;	// accumulate over all components (Polak-Ribiere numerator)
}
if (gg == 0.0) {
System.err.println ("ConjugateGradient converged: gradient is exactly zero.");
converged = true;
return true; // In unlikely case that gradient is exactly zero, then we are done
}
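			// Polak-Ribiere coefficient: ((g_new - g_old) . g_new) / (g_old . g_old),
			// used below to mix the previous search direction h into the new one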
gam = dgg/gg;
double hj;
for (j = 0; j < xi.length; j++) {
xj = xi[j];
g[j] = xj;
hj = h[j];
hj = xj + gam * hj;
h[j] = hj;
}
assert (!MatrixOps.isNaN(h));
MatrixOps.set (xi, h);
searchingGradient = false;
iterations++;
if (iterations > maxIterations) {
System.err.println("Too many iterations in ConjugateGradient.java");
converged = true;
return true;
//throw new IllegalStateException ("Too many iterations.");
}
if (eval != null)
eval.evaluate (optimizable, iterations);
}
return false;
}
public void reset () { xi = null; }
}
| 6,495 | 31.158416 | 119 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/OptimizationException.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
*/
package cc.mallet.optimize;
/**
* General exception thrown by optimization algorithms when there
* is an optimization-specific problem. For example, an exception
* might be thrown when the gradient is sufficiently large but
* no step is possible in that direction.
*
* @author Jerod Weinman <a href="mailto:[email protected]">[email protected]</a>
* @version $Id: OptimizationException.java,v 1.1 2007/10/22 21:37:39 mccallum Exp $
*/
public class OptimizationException extends RuntimeException {
public OptimizationException ()
{
super ();
}
public OptimizationException (String message)
{
super (message);
}
public OptimizationException (String message, Throwable cause)
{
super (message, cause);
}
public OptimizationException (Throwable cause)
{
super (cause);
}
}
| 1,262 | 27.066667 | 88 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/Optimizer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
public interface Optimizer
{
// Returns true if it has converged
// TODO change this to "optimize"
public boolean optimize ();
public boolean optimize (int numIterations);
public boolean isConverged();
public Optimizable getOptimizable();
@Deprecated // Figure out the right interface for this. It is odd that 'sampleAssignments' reaches into InstanceList indices
public interface ByBatches {
public boolean optimize (int numSamples, int[] sampleAssigments);
public boolean optimize (int numIterations, int numSamples, int[] sampleAssignments);
}
// Rest below is deprecated
/*
public interface ByValue {
// Returns true if it has converged
public boolean maximize (Optimizable.ByValue maxable);
public boolean maximize (Optimizable.ByValue maxable, int numIterations);
}
public interface ByGradient {
// Returns true if it has converged
public boolean maximize (Optimizable.ByValue maxable);
public boolean maximize (Optimizable.ByValue maxable, int numIterations);
}
public interface ByValueGradient {
// Returns true if it has converged
public boolean maximize (Optimizable.ByGradientValue maxable);
public boolean maximize (Optimizable.ByGradientValue maxable, int numIterations);
}
public interface ByHessian {
// Returns true if it has converged
public boolean maximize (Optimizable.ByHessian minable);
public boolean maximize (Optimizable.ByHessian minable, int numIterations);
}
public interface ByGISUpdate {
// Returns true if it has converged
public boolean maximize (Optimizable.ByGISUpdate maxable);
public boolean maximize (Optimizable.ByGISUpdate maxable, int numIterations);
}
public interface ByBatchGradient {
// Returns true if it has converged
public boolean maximize (Optimizable.ByBatchGradient maxable, int numSamples, int[] sampleAssigments);
public boolean maximize (Optimizable.ByBatchGradient maxable, int numIterations, int numSamples, int[] sampleAssignments);
}
*/
}
| 2,560 | 32.697368 | 126 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/LimitedMemoryBFGS.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Aron Culotta <a href="mailto:[email protected]">[email protected]</a>
*/
/**
Limited Memory BFGS, as described in Byrd, Nocedal, and Schnabel,
"Representations of Quasi-Newton Matrices and Their Use in Limited
Memory Methods"
*/
package cc.mallet.optimize;
import java.util.logging.*;
import java.util.LinkedList;
import cc.mallet.optimize.BackTrackLineSearch;
import cc.mallet.optimize.LineOptimizer;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
public class LimitedMemoryBFGS implements Optimizer
{
private static Logger logger = MalletLogger.getLogger("edu.umass.cs.mallet.base.ml.maximize.LimitedMemoryBFGS");
boolean converged = false;
Optimizable.ByGradientValue optimizable;
final int maxIterations = 1000;
// xxx need a more principled stopping point
//final double tolerance = .0001;
private double tolerance = .0001;
final double gradientTolerance = .001;
final double eps = 1.0e-5;
// The number of corrections used in BFGS update
// ideally 3 <= m <= 7. Larger m means more cpu time, memory.
final int m = 4;
// Line search function
private LineOptimizer.ByGradient lineMaximizer;
public LimitedMemoryBFGS (Optimizable.ByGradientValue function) {
this.optimizable = function;
lineMaximizer = new BackTrackLineSearch (function);
}
public Optimizable getOptimizable () { return this.optimizable; }
public boolean isConverged () { return converged; }
/*
public void setLineMaximizer (LineOptimizer.ByGradient maxer)
{
lineMaximizer = maxer;
}*/
// State of search
// g = gradient
// s = list of m previous "parameters" values
// y = list of m previous "g" values
// rho = intermediate calculation
double [] g, oldg, direction, parameters, oldParameters;
LinkedList s = new LinkedList();
LinkedList y = new LinkedList();
LinkedList rho = new LinkedList();
double [] alpha;
static double step = 1.0;
int iterations;
private OptimizerEvaluator.ByGradient eval = null;
// CPAL - added this
public void setTolerance(double newtol) {
this.tolerance = newtol;
}
public void setEvaluator (OptimizerEvaluator.ByGradient eval) { this.eval = eval; }
public int getIteration () {
return iterations;
}
public boolean optimize ()
{
return optimize (Integer.MAX_VALUE);
}
public boolean optimize (int numIterations)
{
double initialValue = optimizable.getValue();
logger.fine("Entering L-BFGS.optimize(). Initial Value="+initialValue);
if(g==null) { //first time through
logger.fine("First time through L-BFGS");
iterations = 0;
s = new LinkedList();
y = new LinkedList();
rho = new LinkedList();
alpha = new double[m];
for(int i=0; i<m; i++)
alpha[i] = 0.0;
parameters = new double[optimizable.getNumParameters()];
oldParameters = new double[optimizable.getNumParameters()];
g = new double[optimizable.getNumParameters()];
oldg = new double[optimizable.getNumParameters()];
direction = new double[optimizable.getNumParameters()];
optimizable.getParameters (parameters);
System.arraycopy (parameters, 0, oldParameters, 0, parameters.length);
optimizable.getValueGradient (g);
System.arraycopy (g, 0, oldg, 0, g.length);
System.arraycopy (g, 0, direction, 0, g.length);
if (MatrixOps.absNormalize (direction) == 0) {
logger.info("L-BFGS initial gradient is zero; saying converged");
g = null;
converged = true;
return true;
}
logger.fine ("direction.2norm: " + MatrixOps.twoNorm (direction));
MatrixOps.timesEquals(direction, 1.0 / MatrixOps.twoNorm(direction));
// make initial jump
logger.fine ("before initial jump: \ndirection.2norm: " +
MatrixOps.twoNorm (direction) + " \ngradient.2norm: " +
MatrixOps.twoNorm (g) + "\nparameters.2norm: " +
MatrixOps.twoNorm(parameters));
//TestMaximizable.testValueAndGradientInDirection (maxable, direction);
step = lineMaximizer.optimize(direction, step);
if (step == 0.0) {// could not step in this direction.
// give up and say converged.
g = null; // reset search
step = 1.0;
throw new OptimizationException("Line search could not step in the current direction. " +
"(This is not necessarily cause for alarm. Sometimes this happens close to the maximum," +
" where the function may be very flat.)");
//return false;
}
optimizable.getParameters (parameters);
optimizable.getValueGradient(g);
logger.fine ("after initial jump: \ndirection.2norm: " +
MatrixOps.twoNorm (direction) + " \ngradient.2norm: "
+ MatrixOps.twoNorm (g));
}
for(int iterationCount = 0; iterationCount < numIterations;
iterationCount++) {
double value = optimizable.getValue();
logger.fine("L-BFGS iteration="+iterationCount
+", value="+value+" g.twoNorm: "+MatrixOps.twoNorm(g)+
" oldg.twoNorm: "+MatrixOps.twoNorm(oldg));
// get difference between previous 2 gradients and parameters
double sy = 0.0;
double yy = 0.0;
for (int i=0; i < oldParameters.length; i++) {
// -inf - (-inf) = 0; inf - inf = 0
if (Double.isInfinite(parameters[i]) &&
Double.isInfinite(oldParameters[i]) &&
(parameters[i]*oldParameters[i] > 0))
oldParameters[i] = 0.0;
else
oldParameters[i] = parameters[i] - oldParameters[i];
if (Double.isInfinite(g[i]) &&
Double.isInfinite(oldg[i]) &&
(g[i]*oldg[i] > 0))
oldg[i] = 0.0;
else oldg[i] = g[i] - oldg[i];
sy += oldParameters[i] * oldg[i]; // si * yi
yy += oldg[i]*oldg[i];
direction[i] = g[i];
}
if ( sy > 0 ) {
throw new InvalidOptimizableException ("sy = "+sy+" > 0" );
}
double gamma = sy / yy; // scaling factor
if ( gamma>0 )
throw new InvalidOptimizableException ("gamma = "+gamma+" > 0" );
push (rho, 1.0/sy);
push (s, oldParameters);
push (y, oldg);
// calculate new direction
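			// L-BFGS two-loop recursion: walk back through the stored (s, y) pairs
			// subtracting alpha_i * y_i from the direction, scale by gamma = sy/yy,
			// then walk forward adding (alpha_i - beta_i) * s_i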
assert (s.size() == y.size()) :
"s.size: " + s.size() + " y.size: " + y.size();
for(int i = s.size() - 1; i >= 0; i--) {
alpha[i] = ((Double)rho.get(i)).doubleValue() *
MatrixOps.dotProduct ( (double[])s.get(i), direction);
MatrixOps.plusEquals (direction, (double[])y.get(i),
-1.0 * alpha[i]);
}
MatrixOps.timesEquals(direction, gamma);
for(int i = 0; i < y.size(); i++) {
double beta = (((Double)rho.get(i)).doubleValue()) *
MatrixOps.dotProduct((double[])y.get(i), direction);
MatrixOps.plusEquals(direction,(double[])s.get(i),
alpha[i] - beta);
}
for (int i=0; i < oldg.length; i++) {
oldParameters[i] = parameters[i];
oldg[i] = g[i];
direction[i] *= -1.0;
}
logger.fine ("before linesearch: direction.gradient.dotprod: "+
MatrixOps.dotProduct(direction,g)+"\ndirection.2norm: " +
MatrixOps.twoNorm (direction) + "\nparameters.2norm: " +
MatrixOps.twoNorm(parameters));
//TestMaximizable.testValueAndGradientInDirection (maxable, direction);
step = lineMaximizer.optimize(direction, step);
if (step == 0.0) { // could not step in this direction.
g = null; // reset search
step = 1.0;
// xxx Temporary test; passed OK
// TestMaximizable.testValueAndGradientInDirection (maxable, direction);
throw new OptimizationException("Line search could not step in the current direction. " +
"(This is not necessarily cause for alarm. Sometimes this happens close to the maximum," +
" where the function may be very flat.)");
// return false;
}
optimizable.getParameters (parameters);
optimizable.getValueGradient(g);
logger.fine ("after linesearch: direction.2norm: " +
MatrixOps.twoNorm (direction));
double newValue = optimizable.getValue();
// Test for terminations
if(2.0*Math.abs(newValue-value) <= tolerance*
(Math.abs(newValue)+Math.abs(value) + eps)){
logger.info("Exiting L-BFGS on termination #1:\nvalue difference below tolerance (oldValue: " + value + " newValue: " + newValue);
converged = true;
return true;
}
double gg = MatrixOps.twoNorm(g);
if(gg < gradientTolerance) {
logger.fine("Exiting L-BFGS on termination #2: \ngradient="+gg+" < "+gradientTolerance);
converged = true;
return true;
}
if(gg == 0.0) {
logger.fine("Exiting L-BFGS on termination #3: \ngradient==0.0");
converged = true;
return true;
}
logger.fine("Gradient = "+gg);
iterations++;
if (iterations > maxIterations) {
System.err.println("Too many iterations in L-BFGS.java. Continuing with current parameters.");
converged = true;
return true;
//throw new IllegalStateException ("Too many iterations.");
}
//end of iteration. call evaluator
if (eval != null && !eval.evaluate (optimizable, iterationCount)) {
logger.fine ("Exiting L-BFGS on termination #4: evaluator returned false.");
converged = true;
return false;
}
}
return false;
}
/** Resets the previous gradients and values that are used to
* approximate the Hessian. NOTE - If the {@link Optimizable} object
* is modified externally, this method should be called to avoid
* IllegalStateExceptions. */
public void reset () {
g = null;
}
/**
* Pushes a new object onto the queue l
* @param l linked list queue of Matrix obj's
* @param toadd matrix to push onto queue
*/
private void push(LinkedList l, double[] toadd) {
assert(l.size() <= m);
if(l.size() == m) {
// remove oldest matrix and add newset to end of list.
// to make this more efficient, actually overwrite
// memory of oldest matrix
// this overwrites the oldest matrix
double[] last = (double[]) l.get(0);
System.arraycopy(toadd, 0, last, 0, toadd.length);
Object ptr = last;
// this readjusts the pointers in the list
for(int i=0; i<l.size()-1; i++)
l.set(i, (double[])l.get(i+1));
l.set(m-1, ptr);
}
else {
double [] newArray = new double[toadd.length];
System.arraycopy (toadd, 0, newArray, 0, toadd.length);
l.addLast(newArray);
}
}
/**
* Pushes a new object onto the queue l
* @param l linked list queue of Double obj's
* @param toadd double value to push onto queue
*/
private void push(LinkedList l, double toadd) {
assert(l.size() <= m);
if(l.size() == m) { //pop old double and add new
l.removeFirst();
l.addLast(new Double(toadd));
}
else
l.addLast(new Double(toadd));
}
}
| 10,851 | 31.785498 | 134 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/OptimizableCollection.java | package cc.mallet.optimize;
import java.util.ArrayList;
import cc.mallet.types.MatrixOps;
public class OptimizableCollection {
public class ByGradientValue implements Optimizable.ByGradientValue
{
ArrayList<Optimizable.ByGradientValue> optimizables;
public ByGradientValue (Optimizable.ByGradientValue... ops) {
optimizables = new ArrayList<Optimizable.ByGradientValue>(ops.length);
for (Optimizable.ByGradientValue o : ops)
optimizables.add(o);
}
public void getValueGradient (double[] buffer) {
double[] b2 = new double[buffer.length];
for (Optimizable.ByGradientValue o : optimizables) {
MatrixOps.setAll(b2, 0);
o.getValueGradient(b2);
MatrixOps.plusEquals(buffer, b2);
}
}
public double getValue () {
double ret = 0;
for (Optimizable.ByGradientValue o : optimizables)
ret += o.getValue();
return ret;
}
// Here we rely on all optimizables pointing to the same set of parameters!
public int getNumParameters() {
return optimizables.get(0).getNumParameters();
}
public double getParameter(int index) {
return optimizables.get(0).getParameter(index);
}
public void getParameters(double[] buffer) {
optimizables.get(0).getParameters(buffer);
}
public void setParameter(int index, double value) {
optimizables.get(0).setParameter(index, value);
}
public void setParameters(double[] params) {
optimizables.get(0).setParameters(params);
}
}
}
| 1,468 | 23.081967 | 78 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/AGIS.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
* Implementation of Salakhutdinav and Roweis Adaptive Overrelaxed GIS (2003)
@author Ryan McDonald <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
import java.util.logging.*;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.MatrixOps;
public class AGIS implements Optimizer
{
private static Logger logger =
Logger.getLogger("edu.umass.cs.mallet.base.minimize.AGIS");
double initialStepSize = 1;
double alpha;
double eta = 1.0;
double tolerance = 0.0001;
int maxIterations = 200;
Optimizable.ByGISUpdate maxable;
boolean converged = false;
boolean backTrack;
// "eps" is a small number to recitify the special case of converging
// to exactly zero function value
final double eps = 1.0e-10;
public AGIS (Optimizable.ByGISUpdate maxable, double alph)
{
this(maxable,alph,true);
}
public AGIS (Optimizable.ByGISUpdate maxable, double alph, boolean backTrack)
{
this.maxable = maxable;
this.alpha = alph;
this.backTrack = backTrack;
}
public Optimizable getOptimizable () { return maxable; }
public boolean isConverged () { return converged; }
public boolean optimize () {
return optimize (maxIterations);
}
public boolean optimize (int numIterations)
{
int iterations;
double[] params = new double[maxable.getNumParameters()];
double[] gis = new double[maxable.getNumParameters()];
double[] old_params = new double[maxable.getNumParameters()];
double[] updates = new double[maxable.getNumParameters()];
maxable.getParameters(params);
maxable.getParameters(gis);
maxable.getParameters(old_params);
for (iterations = 0; iterations < numIterations; iterations++) {
boolean complete = false;
double old = maxable.getValue();
maxable.getGISUpdate(updates);
MatrixOps.plusEquals(gis,updates);
MatrixOps.plusEquals(params,updates,eta);
maxable.setParameters(params);
double next = maxable.getValue();
			// Different from normal AGIS: only fall back to GIS updates
			// if the log-likelihood gets worse,
			// i.e. as long as the log-likelihood improves, keep the AGIS update
if(next > old) {
complete = true;
// don't let eta get too large
if(eta*alpha < 99999999.0)
eta = eta*alpha;
}
if(backTrack && complete == false) {
// gone too far
// unlike Roweis et al., we will back track on eta to find
// acceptable value, instead of automatically setting it to 1
while(eta > 1.0 && complete == false) {
eta = eta/2.0;
MatrixOps.set(params,old_params);
MatrixOps.plusEquals(params,updates,eta);
maxable.setParameters(params);
next = maxable.getValue();
if(next > old)
complete = true;
}
}
else if(complete == false) {
maxable.setParameters(gis);
eta = 1.0;
next = maxable.getValue();
}
logger.info("eta: " + eta);
if (2.0*Math.abs(next-old) <= tolerance*(Math.abs(next)+Math.abs(old)+eps)) {
converged = true;
return true;
}
if(numIterations > 1) {
maxable.getParameters(params);
maxable.getParameters(old_params);
maxable.getParameters(gis);
}
}
converged = false;
return false;
}
}
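
// Illustrative usage sketch: "gisObjective" is a placeholder for a caller-supplied
// Optimizable.ByGISUpdate; alpha > 1 controls how aggressively the overrelaxed step
// size eta is allowed to grow between iterations.
class AGISExample {
	static boolean maximize (Optimizable.ByGISUpdate gisObjective) {
		AGIS agis = new AGIS(gisObjective, 1.5); // alpha = 1.5, back-tracking enabled
		return agis.optimize(200); // true if converged within 200 iterations
	}
}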
| 3,638 | 25.562044 | 87 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/Optimizable.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
import java.util.Collection;
public interface Optimizable
{
public int getNumParameters ();
public void getParameters (double[] buffer);
public double getParameter (int index);
public void setParameters (double[] params);
public void setParameter (int index, double value);
public interface ByValue extends Optimizable
{
public double getValue ();
}
public interface ByGradient extends Optimizable
{
public void getValueGradient (double[] buffer);
}
public interface ByGradientValue extends Optimizable
{
public void getValueGradient (double[] buffer);
public double getValue ();
}
public interface ByHessian extends Optimizable.ByGradientValue
{
public void getValueHessian (double[][] buffer);
}
public interface ByVotedPerceptron extends Optimizable
{
public int getNumInstances ();
public void getValueGradientForInstance (int instanceIndex, double[] bufffer);
}
public interface ByGISUpdate extends Optimizable
{
public double getValue();
public void getGISUpdate (double[] buffer);
}
public interface ByBatchGradient extends Optimizable {
public void getBatchValueGradient (double[] buffer, int batchIndex, int[] batchAssignments);
public double getBatchValue(int batchIndex, int[] batchAssignments);
}
// gsc: for computing gradient from batches in multiple threads
public interface ByCombiningBatchGradient extends Optimizable {
public void getBatchValueGradient (double[] buffer, int batchIndex, int[] batchAssignments);
public double getBatchValue(int batchIndex, int[] batchAssignments);
public void combineGradients (Collection<double[]> batchGradients, double[] buffer);
public int getNumBatches();
}
}
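
// Illustrative sketch: a minimal Optimizable.ByGradientValue for the concave function
// f(x) = -(x - 3)^2, whose maximum lies at x = 3. Gradient-based optimizers in this
// package rely on exactly this value/gradient contract.
class ExampleQuadraticOptimizable implements Optimizable.ByGradientValue {
	private final double[] params = new double[1];
	public int getNumParameters () { return 1; }
	public void getParameters (double[] buffer) { buffer[0] = params[0]; }
	public double getParameter (int index) { return params[index]; }
	public void setParameters (double[] newParams) { params[0] = newParams[0]; }
	public void setParameter (int index, double value) { params[index] = value; }
	public double getValue () { return -(params[0] - 3) * (params[0] - 3); }
	public void getValueGradient (double[] buffer) { buffer[0] = -2 * (params[0] - 3); }
}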
| 2,240 | 27.730769 | 94 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/GradientBracketLineOptimizer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize;
import java.util.logging.*;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.Matrix;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
// Brent's method using derivative information
// p405, "Numerical Recipes in C"
public class GradientBracketLineOptimizer implements LineOptimizer {
private static Logger logger = MalletLogger.getLogger(GradientBracketLineOptimizer.class.getName());
int maxIterations = 50;
Optimizable.ByGradientValue optimizable;
public GradientBracketLineOptimizer (Optimizable.ByGradientValue function) {
this.optimizable = function;
}
/*
public double maximize (Optimizable function, Matrix line, double initialStep) {
return maximize ((Optimizable.ByGradient)function, line, initialStep);
}*/
// TODO
// This seems to work but is slower than BackTrackLineSearch. Why?
// Return the last step size used.
// "line" should point in the direction we want to move the parameters to get
	// a higher value.
public double optimize (double[] line, double initialStep)
{
assert (initialStep > 0);
double[] parameters = new double[optimizable.getNumParameters()];
double[] gradient = new double[optimizable.getNumParameters()];
optimizable.getParameters(parameters);
optimizable.getValueGradient(gradient);
// a=left, b=center, c=right, t=test
double ax, bx, cx, tx; // steps (domain), these are deltas from initial params!
double ay, by, cy, ty; // costs (range)
double ag, bg, cg, tg; // projected gradients
double ox; // the x step of the last function call
double origY;
tx = ax = bx = cx = ox = 0;
ty = ay = by = cy = origY = optimizable.getValue();
tg = ag = bg = MatrixOps.dotProduct(gradient,line);
// Make sure search-line points upward
//logger.info ("Initial gradient = "+tg);
if (ag <= 0) {
throw new InvalidOptimizableException
("The search direction \"line\" does not point down uphill. "
+ "gradient.dotProduct(line)="+ag+", but should be positive");
}
// Find an cx value where the gradient points the other way. Then
// we will know that the (local) zero-gradient minimum falls
// in between ax and cx.
int iterations = 0;
do {
if (iterations++ > maxIterations)
throw new IllegalStateException ("Exceeded maximum number allowed iterations searching for gradient cross-over.");
// If we are still looking to cross the minimum, move ax towards it
ax = bx; ay = by; ag = bg;
// Save this (possibly) middle point; it might make an acceptable bx
bx = tx; by = ty; bg = tg;
if (tx == 0) {
if (initialStep < 1.0) {
tx = initialStep;
}
else {
tx = 1.0;
}
// Sometimes the "suggested" initialStep is
// very large and causes values to go to
// infinity.
//tx = initialStep;
//tx = 1.0;
}
else {
tx *= 3.0;
}
//logger.info ("Gradient cross-over search, incrementing by "+(tx-ox));
MatrixOps.plusEquals(parameters,line,tx-ox);
optimizable.setParameters (parameters);
ty = optimizable.getValue();
optimizable.getValueGradient(gradient);
tg = MatrixOps.dotProduct(gradient,line);
//logger.info ("Next gradient = "+tg);
ox = tx;
} while (tg > 0);
//System.err.println(iterations + " total iterations in A.");
cx = tx; cy = ty; cg = tg;
//logger.info ("After gradient cross-over ax="+ax+" bx="+bx+" cx="+cx);
//logger.info ("After gradient cross-over ay="+ay+" by="+by+" cy="+cy);
//logger.info ("After gradient cross-over ag="+ag+" bg="+bg+" cg="+cg);
// We need to find a "by" that is less than both "ay" and "cy"
assert (!Double.isNaN(by));
while (by <= ay || by <= cy || bx == ax) {
// Last condition would happen if we did first while-loop only once
if (iterations++ > maxIterations)
throw new IllegalStateException ("Exceeded maximum number allowed iterations searching for bracketed minimum, iteratation count = "+iterations);
// xxx What should this tolerance be?
// xxx I'm nervous that this is masking some assert()s below that were previously failing.
// If they were failing due to round-off error, that's OK, but if not...
if ((Math.abs(bg) < 100 || Math.abs(ay-by) < 10 || Math.abs(by-cy) < 10) && bx != ax)
//if ((Math.abs(bg) < 10 || Math.abs(ay-by) < 1 || Math.abs(by-cy) < 1) && bx != ax)
// Magically, we are done
break;
// Instead make a version that finds the interpolating point by
// fitting a parabola, and then jumps to that minimum. If the
// actual y value is within "tolerance" of the parabola fit's
// guess, then we are done, otherwise, use the parabola's x to
// split the region, and try again.
// There might be some cases where this will perform worse than
// simply bisecting, as we do now, when the function is not at
// all parabola shaped.
// If the gradients ag and bg point in the same direction, then
// the value by must be less than ay. And vice-versa for bg and cg.
//assert (ax==bx || ((ag*bg)>=0 && by>ay) || (((bg*cg)>=0 && by>cy)));
assert (!Double.isNaN(bg));
if (bg > 0) {
// the minimum is at higher x values than bx; drop ax
assert (by >= ay);
ax = bx; ay = by; ag = bg;
} else {
// the minimum is at lower x values than bx; drop cx
assert (by >= cy);
cx = bx; cy = by; cg = bg;
}
// Find a new mid-point
bx = (ax + cx) / 2;
//logger.info ("Minimum bx search, incrementing by "+(bx-ox));
MatrixOps.plusEquals(parameters,line,bx - ox);
optimizable.setParameters (parameters);
by = optimizable.getValue();
assert (!Double.isNaN(by));
optimizable.getValueGradient(gradient);
bg = MatrixOps.dotProduct(gradient,line);
ox = bx;
//logger.info (" During min bx search ("+iterations+") ax="+ax+" bx="+bx+" cx="+cx);
//logger.info (" During min bx search ("+iterations+") ay="+ay+" by="+by+" cy="+cy);
//logger.info (" During min bx search ("+iterations+") ag="+ag+" bg="+bg+" cg="+cg);
}
// We now have two points (ax, cx) that straddle the minimum, and a mid-point
// bx with a value lower than either ay or cy.
tx = ax
+ (((bx-ax)*(bx-ax)*(cy-ay)
-(cx-ax)*(cx-ax)*(by-ay))
/
(2.0 * ((bx-ax)*(cy-ay)-(cx-ax)*(by-ay))));
//logger.info ("Ending ax="+ax+" bx="+bx+" cx="+cx+" tx="+tx);
//logger.info ("Ending ay="+ay+" by="+by+" cy="+cy);
MatrixOps.plusEquals(parameters,line,tx - ox);
optimizable.setParameters (parameters);
//assert (function.getValue() >= origY);
logger.info ("Ending cost = "+optimizable.getValue());
// As a suggestion for the next initalStep, return the distance
// from our initialStep to the minimum we found.
//System.err.println(iterations + " total iterations in B.");
//System.exit(0);
return Math.max(1,tx - initialStep);
}
}
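
// Illustrative usage sketch: a single line search along the current gradient direction.
// "objective" is a placeholder assumed to already hold the parameters from which the
// search should start.
class GradientBracketLineOptimizerExample {
	static double stepAlongGradient (Optimizable.ByGradientValue objective) {
		double[] direction = new double[objective.getNumParameters()];
		objective.getValueGradient(direction); // search uphill along the gradient
		GradientBracketLineOptimizer lineSearch = new GradientBracketLineOptimizer(objective);
		return lineSearch.optimize(direction, 1.0); // returns a suggested next initial step
	}
}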
| 7,403 | 36.02 | 148 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/OptimizerEvaluator.java | /* Copyright (C) 2003 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.optimize;
/**
* Callback interface that allows optimizer clients to perform some operation after every iteration.
*
* Created: Sep 28, 2005
*
* @author <A HREF="mailto:[email protected]>[email protected]</A>
* @version $Id: OptimizerEvaluator.java,v 1.1 2007/10/22 21:37:39 mccallum Exp $
*/
public interface OptimizerEvaluator {
public interface ByGradient {
/**
* Performs some operation at the end of each iteration of a maximizer.
*
* @param maxable Function that's being optimized.
* @param iter Number of just-finished iteration.
* @return true if optimization should continue.
*/
boolean evaluate (Optimizable.ByGradientValue maxable, int iter);
}
public interface ByBatchGradient {
/**
* Performs some operation at the end of every batch.
*
* @param maxable Function that's being optimized.
* @param iter Number of just-finished iteration.
* @param sampleId Number of just-finished sample.
* @param numSamples Number of samples total.
* @param sampleAssns Assignments of instances to samples
* @return true if optimization should continue.
*/
boolean evaluate (Optimizable.ByBatchGradient maxable, int iter, int sampleId, int numSamples, int[] sampleAssns);
}
}
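
// Illustrative sketch: an evaluator that logs the current value after every iteration and
// asks the optimizer to stop once a caller-chosen iteration budget has been spent.
class LoggingEvaluatorExample implements OptimizerEvaluator.ByGradient {
	private final int maxIterations;
	LoggingEvaluatorExample (int maxIterations) { this.maxIterations = maxIterations; }
	public boolean evaluate (Optimizable.ByGradientValue maxable, int iter) {
		System.out.println("iteration " + iter + " value = " + maxable.getValue());
		return iter < maxIterations; // returning false asks the optimizer to stop
	}
}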
| 1,742 | 36.891304 | 118 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/OrthantWiseLimitedMemoryBFGS.java | package cc.mallet.optimize;
import java.util.LinkedList;
import java.util.logging.Logger;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
/**
 * Implementation of the orthant-wise limited-memory quasi-Newton method for
* optimizing convex L1-regularized objectives. See:
* "Scalable training of l1-regularized log-linear models" by Galen Andrew and
* Jianfeng Gao in ICML 2007 for details. This code is an adaptation of the
* freely-available C++ code on Galen's webpage.
*
* @author Kedar Bellare
*/
public class OrthantWiseLimitedMemoryBFGS implements Optimizer {
private static Logger logger = MalletLogger
.getLogger(OrthantWiseLimitedMemoryBFGS.class.getName());
boolean converged = false;
Optimizable.ByGradientValue optimizable;
// name of optimizable for value output
String optName;
final int maxIterations = 1000;
final double tolerance = .0001;
final double gradientTolerance = .001;
final double eps = 1.0e-5;
double l1Weight;
// The number of corrections used in BFGS update
// ideally 3 <= m <= 7. Larger m means more cpu time, memory.
final int m = 4;
// State of optimizer search
// oldValue = value before line search, value = value after line search
double oldValue, value, yDotY;
// grad = gradient
double[] grad, oldGrad, direction, steepestDescentDirection, parameters,
oldParameters;
// s = list of m previous "difference in parameters" values
// y = list of m previous "difference in grad" values
LinkedList<double[]> s, y;
// rho = intermediate calculation
LinkedList<Double> rhos;
double[] alphas;
int iterations;
public OrthantWiseLimitedMemoryBFGS(Optimizable.ByGradientValue function) {
this(function, 0.0);
}
public OrthantWiseLimitedMemoryBFGS(Optimizable.ByGradientValue function,
double l1wt) {
this.optimizable = function;
this.l1Weight = l1wt;
String parts[] = optimizable.getClass().getName().split("\\.");
this.optName = parts[parts.length - 1];
// initialize optimizer state
iterations = 0;
s = new LinkedList<double[]>();
y = new LinkedList<double[]>();
rhos = new LinkedList<Double>();
alphas = new double[m];
MatrixOps.setAll(alphas, 0.0);
yDotY = 0;
int numParameters = optimizable.getNumParameters();
// get initial parameters
parameters = new double[numParameters];
optimizable.getParameters(parameters);
// get initial value
value = evalL1();
// get initial gradient
grad = new double[numParameters];
evalGradient();
// initialize direction
direction = new double[numParameters];
steepestDescentDirection = new double[numParameters];
// initialize backups
oldParameters = new double[numParameters];
oldGrad = new double[numParameters];
}
public Optimizable getOptimizable() {
return optimizable;
}
public boolean isConverged() {
return converged;
}
public int getIteration() {
return iterations;
}
public boolean optimize() {
return optimize(Integer.MAX_VALUE);
}
public boolean optimize(int numIterations) {
logger.fine("Entering OWL-BFGS.optimize(). L1 weight=" + l1Weight
+ " Initial Value=" + value);
for (int iter = 0; iter < numIterations; iter++) {
// create descent direction
makeSteepestDescDir();
// adjust for curvature
mapDirByInverseHessian(yDotY);
// fix direction signs
fixDirSigns();
// backup parameters and gradient; then perform line-search
storeSrcInDest(parameters, oldParameters);
storeSrcInDest(grad, oldGrad);
backTrackingLineSearch();
// update gradient after line search
evalGradient();
// check for termination conditions
if (checkValueTerminationCondition()) {
logger.info("Exiting OWL-BFGS on termination #1:");
logger.info("value difference below tolerance (oldValue: "
+ oldValue + " newValue: " + value);
converged = true;
return true;
}
if (checkGradientTerminationCondition()) {
logger.info("Exiting OWL-BFGS on termination #2:");
logger.info("gradient=" + MatrixOps.twoNorm(grad) + " < "
+ gradientTolerance);
converged = true;
return true;
}
// update hessian approximation
yDotY = shift();
iterations++;
if (iterations > maxIterations) {
logger.info("Too many iterations in OWL-BFGS. "
+ "Continuing with current parameters.");
converged = true;
return true;
}
}
return false;
}
/**
* Evaluate value. Make it a minimization problem.
*/
private double evalL1() {
double val = -optimizable.getValue();
double sumAbsWt = 0;
if (l1Weight > 0) {
for (double param : parameters) {
if (Double.isInfinite(param))
continue;
sumAbsWt += Math.abs(param) * l1Weight;
}
}
logger.info("getValue() (" + optName + ".getValue() = " + val
+ " + |w|=" + sumAbsWt + ") = " + (val + sumAbsWt));
return val + sumAbsWt;
}
/**
* Evaluate gradient, make it a descent direction.
*/
private void evalGradient() {
optimizable.getValueGradient(grad);
adjustGradForInfiniteParams(grad);
MatrixOps.timesEquals(grad, -1.0);
}
/**
	 * Creates the steepest descent direction from the gradient and the L1 regularization term.
*/
private void makeSteepestDescDir() {
if (l1Weight == 0) {
for (int i = 0; i < grad.length; i++) {
direction[i] = -grad[i];
}
} else {
for (int i = 0; i < grad.length; i++) {
if (parameters[i] < 0) {
direction[i] = -grad[i] + l1Weight;
} else if (parameters[i] > 0) {
direction[i] = -grad[i] - l1Weight;
} else {
if (grad[i] < -l1Weight) {
direction[i] = -grad[i] - l1Weight;
} else if (grad[i] > l1Weight) {
direction[i] = -grad[i] + l1Weight;
} else {
direction[i] = 0;
}
}
}
}
storeSrcInDest(direction, steepestDescentDirection);
}
private void adjustGradForInfiniteParams(double d[]) {
for (int i = 0; i < parameters.length; i++) {
if (Double.isInfinite(parameters[i]))
d[i] = 0;
}
}
/**
* Adjusts direction based on approximate hessian inverse.
*
* @param yDotY
* y^T * y in BFGS calculation.
*/
private void mapDirByInverseHessian(double yDotY) {
if (s.size() == 0)
return;
int count = s.size();
for (int i = count - 1; i >= 0; i--) {
alphas[i] = -MatrixOps.dotProduct(s.get(i), direction)
/ rhos.get(i);
MatrixOps.plusEquals(direction, y.get(i), alphas[i]);
}
double scalar = rhos.get(count - 1) / yDotY;
logger.fine("Direction multiplier = " + scalar);
MatrixOps.timesEquals(direction, scalar);
for (int i = 0; i < count; i++) {
double beta = MatrixOps.dotProduct(y.get(i), direction)
/ rhos.get(i);
MatrixOps.plusEquals(direction, s.get(i), -alphas[i] - beta);
}
}
private void fixDirSigns() {
if (l1Weight > 0) {
for (int i = 0; i < direction.length; i++) {
if (direction[i] * steepestDescentDirection[i] <= 0) {
direction[i] = 0;
}
}
}
}
private double dirDeriv() {
if (l1Weight == 0) {
return MatrixOps.dotProduct(direction, grad);
} else {
double val = 0.0;
for (int i = 0; i < direction.length; i++) {
if (direction[i] != 0) {
if (parameters[i] < 0) {
val += direction[i] * (grad[i] - l1Weight);
} else if (parameters[i] > 0) {
val += direction[i] * (grad[i] + l1Weight);
} else if (direction[i] < 0) {
val += direction[i] * (grad[i] - l1Weight);
} else if (direction[i] > 0) {
val += direction[i] * (grad[i] + l1Weight);
}
}
}
return val;
}
}
private double shift() {
double[] nextS = null, nextY = null;
int listSize = s.size();
if (listSize < m) {
nextS = new double[parameters.length];
nextY = new double[parameters.length];
} else {
nextS = s.removeFirst();
nextY = y.removeFirst();
rhos.removeFirst();
}
double rho = 0.0;
double yDotY = 0.0;
for (int i = 0; i < parameters.length; i++) {
if (Double.isInfinite(parameters[i])
&& Double.isInfinite(oldParameters[i])
&& parameters[i] * oldParameters[i] > 0)
nextS[i] = 0;
else
nextS[i] = parameters[i] - oldParameters[i];
if (Double.isInfinite(grad[i]) && Double.isInfinite(oldGrad[i])
&& grad[i] * oldGrad[i] > 0)
nextY[i] = 0;
else
nextY[i] = grad[i] - oldGrad[i];
rho += nextS[i] * nextY[i];
yDotY += nextY[i] * nextY[i];
}
logger.fine("rho=" + rho);
if (rho < 0) {
throw new InvalidOptimizableException("rho = " + rho + " < 0: "
+ "Invalid hessian inverse. "
+ "Gradient change should be opposite of parameter change.");
}
s.addLast(nextS);
y.addLast(nextY);
rhos.addLast(rho);
// update old params and grad
storeSrcInDest(parameters, oldParameters);
storeSrcInDest(grad, oldGrad);
return yDotY;
}
private void storeSrcInDest(double src[], double dest[]) {
System.arraycopy(src, 0, dest, 0, src.length);
}
// backtrack line search
private void backTrackingLineSearch() {
double origDirDeriv = dirDeriv();
if (origDirDeriv >= 0) {
throw new InvalidOptimizableException(
"L-BFGS chose a non-ascent direction: check your gradient!");
}
double alpha = 1.0;
double backoff = 0.5;
if (iterations == 0) {
double normDir = Math.sqrt(MatrixOps.dotProduct(direction,
direction));
alpha = 1.0 / normDir;
backoff = 0.1;
}
final double c1 = 1e-4;
// store old value
oldValue = value;
logger.fine("*** Starting line search iter=" + iterations);
logger.fine("iter[" + iterations + "] Value at start of line search = "
+ value);
while (true) {
// update parameters and gradient
getNextPoint(alpha);
// find new value
value = evalL1();
logger.fine("iter[" + iterations + "] Using alpha = " + alpha
+ " new value = " + value + " |grad|="
+ MatrixOps.twoNorm(grad) + " |x|="
+ MatrixOps.twoNorm(parameters));
if (value <= oldValue + c1 * origDirDeriv * alpha)
break;
alpha *= backoff;
}
}
private void getNextPoint(double alpha) {
for (int i = 0; i < parameters.length; i++) {
parameters[i] = oldParameters[i] + direction[i] * alpha;
if (l1Weight > 0) {
// do not allow to cross orthant boundaries if using
// L1-regularization
if (oldParameters[i] * parameters[i] < 0) {
parameters[i] = 0.0;
}
}
}
optimizable.setParameters(parameters);
}
// termination conditions
private boolean checkValueTerminationCondition() {
return (2.0 * Math.abs(value - oldValue) <= tolerance
* (Math.abs(value) + Math.abs(oldValue) + eps));
}
private boolean checkGradientTerminationCondition() {
return MatrixOps.twoNorm(grad) < gradientTolerance;
}
}
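
// Illustrative usage sketch: maximizing an objective subject to an L1 penalty on its
// parameters. "objective" is a caller-supplied placeholder; an l1Weight of 0 recovers
// plain L-BFGS behaviour.
class OrthantWiseExample {
	static boolean optimizeWithL1 (Optimizable.ByGradientValue objective, double l1Weight) {
		OrthantWiseLimitedMemoryBFGS owlqn = new OrthantWiseLimitedMemoryBFGS(objective, l1Weight);
		return owlqn.optimize(); // true once the value or gradient tolerance is met
	}
}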
| 10,587 | 24.761557 | 78 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/tests/TestOptimizer.java | /* Copyright (C) 2003 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.optimize.tests;
import cc.mallet.optimize.*;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Unit tests for the optimizers in cc.mallet.optimize.
*
*
* Created: Mon Apr 26 19:54:25 2004
*
* @author <a href="mailto:[email protected]">Charles Sutton</a>
* @version $Id: TestMaximizer.java,v 1.1 2007/10/22 21:37:49 mccallum Exp $
*/
public class TestOptimizer extends TestCase {
public TestOptimizer(String name) {
super(name);
}
	// Maximizable for f(x) = -3x^2 + 5x - 2, which has its maximum at x = 5/6
static class SimplePoly implements Optimizable.ByGradientValue {
double[] params = new double[1];
public void getParameters(double[] doubleArray) {
doubleArray[0] = params[0];
}
public int getNumParameters() {
return 1;
}
public double getParameter(int n) {
return params[0];
};
public void setParameters(double[] doubleArray) {
params[0] = doubleArray[0];
}
public void setParameter(int n, double d) {
params[n] = d;
}
public double getValue() {
System.out.println("param = " + params[0] + " value = "
+ (-3 * params[0] * params[0] + 5 * params[0] - 2));
return -3 * params[0] * params[0] + 5 * params[0] - 2;
}
public void getValueGradient(double[] buffer) {
buffer[0] = -6 * params[0] + 5;
}
}
/*
* public void testBoldDriver () { SimplePoly poly = new SimplePoly ();
* Maximizer.ByGradient bold = new BoldDriver (); bold.maximize (poly);
* assertEquals (5.0/6.0, poly.params [0], 1e-3); }
*/
public void testGradientAscent() {
SimplePoly poly = new SimplePoly();
Optimizer gd = new GradientAscent(poly);
gd.optimize();
assertEquals(5.0 / 6.0, poly.params[0], 1e-3);
}
public void testLinearLBFGS() {
SimplePoly poly = new SimplePoly();
Optimizer bfgs = new LimitedMemoryBFGS(poly);
bfgs.optimize();
assertEquals(5.0 / 6.0, poly.params[0], 1e-3);
}
public void testOrthantWiseLBFGSWithoutL1() {
SimplePoly poly = new SimplePoly();
Optimizer bfgs = new OrthantWiseLimitedMemoryBFGS(poly);
bfgs.optimize();
assertEquals(5.0 / 6.0, poly.params[0], 1e-3);
}
public void testOrthantWiseLBFGSWithL1() {
SimplePoly poly = new SimplePoly();
Optimizer bfgs = new OrthantWiseLimitedMemoryBFGS(poly, 3.0);
bfgs.optimize();
assertEquals(2.0 / 6.0, poly.params[0], 1e-3);
}
public void testConjugateGradient() {
SimplePoly poly = new SimplePoly();
Optimizer cg = new ConjugateGradient(poly);
cg.optimize();
assertEquals(5.0 / 6.0, poly.params[0], 1e-3);
}
/**
* @return a <code>TestSuite</code>
*/
public static TestSuite suite() {
return new TestSuite(TestOptimizer.class);
}
public static void main(String[] args) {
junit.textui.TestRunner.run(suite());
}
}// TestOptimizer
| 3,138 | 25.601695 | 76 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/optimize/tests/TestOptimizable.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.optimize.tests;
import junit.framework.*;
import java.util.logging.*;
import java.io.*;
import java.util.Random;
import cc.mallet.classify.*;
import cc.mallet.optimize.LineOptimizer;
import cc.mallet.optimize.Optimizable;
import cc.mallet.pipe.*;
import cc.mallet.pipe.iterator.*;
import cc.mallet.types.*;
import cc.mallet.util.*;
/**
* Contains static methods for testing subclasses of
 * Optimizable and Optimizable.ByGradientValue.  Especially
 * useful are methods that verify the consistency of the value
 * and gradient functions of an instance of
 * Optimizable.ByGradientValue.
*/
public class TestOptimizable extends TestCase
{
private static Logger logger =
MalletLogger.getLogger(TestOptimizable.class.getName());
public TestOptimizable (String name) {
super (name);
}
static private int numComponents = -1;
/**
* Sets the number of gradient components that will be checked.
* If negative, all will be checked.
*/
public static void setNumComponents (int n) { numComponents = n; }
/**
* Tests that parameters set by setParameters can be retrieved by
* getParameters.
* @param maxable Instance of a Maximizable that should be tested.
* Its current parameters will be overwritten.
*/
public static boolean testGetSetParameters (Optimizable maxable)
{
System.out.println ("TestMaximizable testGetSetParameters");
// Set all the parameters to unique values using setParameters()
double[] parameters = new double [maxable.getNumParameters()];
maxable.getParameters (parameters);
for (int i = 0; i < parameters.length; i++)
parameters[i] = (double)i;
maxable.setParameters (parameters);
// Test to make sure those parameters are there
MatrixOps.setAll (parameters, 0.0);
maxable.getParameters (parameters);
for (int i = 0; i < parameters.length; i++)
assertTrue (parameters[i] == (double)i);
return true;
}
public static double
testValueAndGradientInDirection (Optimizable.ByGradientValue maxable, double[] direction)
{
int numParameters = maxable.getNumParameters();
assert (numParameters == direction.length);
double[] oldParameters = new double[numParameters];
double[] parameters = new double[numParameters];
double[] normalizedDirection = direction.clone();
System.arraycopy(direction, 0, normalizedDirection, 0, numParameters);
MatrixOps.absNormalize(normalizedDirection);
double value = maxable.getValue();
// the gradient from the optimizable function
double[] analyticGradient = new double[numParameters];
maxable.getParameters (parameters);
maxable.getParameters (oldParameters);
maxable.getValueGradient (analyticGradient);
// the gradient calculate from the slope of the value
// This setting of epsilon should make the individual elements of
// the analytical gradient and the empirical gradient equal. This
// simplifies the comparison of the individual dimensions of the
// gradient and thus makes debugging easier.
double directionGradient = MatrixOps.dotProduct (analyticGradient, normalizedDirection);
double epsilon = 0.1 / MatrixOps.absNorm(analyticGradient);
double tolerance = 0.00001 * directionGradient; // this was "epsilon * 5";
System.out.println ("epsilon = "+epsilon+" tolerance="+tolerance);
MatrixOps.plusEquals (parameters, normalizedDirection, epsilon);
//logger.fine ("Parameters:"); parameters.print();
maxable.setParameters (parameters);
double epsValue = maxable.getValue();
double slope = (epsValue - value) / epsilon;
System.out.println ("value="+value+" epsilon="+epsilon+" epsValue="+
epsValue+" slope = "+slope+" gradient="+directionGradient);
assert (!Double.isNaN (slope));
double slopeDifference = Math.abs(slope - directionGradient);
logger.info ("TestMaximizable "+
": slope tolerance = "+tolerance+
": gradient slope = "+directionGradient+
", value+epsilon slope = "+slope+
": slope difference = "+slopeDifference);
maxable.setParameters (oldParameters);
assert (Math.abs(slopeDifference) < tolerance) : "Slope difference "+slopeDifference+" is greater than tolerance "+tolerance;
return slopeDifference;
}
/**
* Tests that the value and gradient function are consistent
* at the current parameters.
* Computes both the analytic gradient (the one given by
* <tt>maxable.getValueGradient</tt>) and the empirical gradient,
* which is (if x are the current parameters and f the function
	 * computed by maxable) <tt>(f(x + epsilon) - f(x)) / epsilon</tt>.  Verifies
	 * that the angle between the empirical and analytic gradients
	 * is close to 0.
* @see #testValueAndGradient testValueAndGradient
* @see #testValueAndGradientRandomParameters testValueAndGradientRandomParameters
* @throws IllegalStateException If the angle is above the tolerance
*/
public static double
testValueAndGradientCurrentParameters (Optimizable.ByGradientValue maxable)
{
double[] parameters = new double [maxable.getNumParameters()];
double value = maxable.getValue();
// the gradient from the maximizable function
double[] analyticGradient = new double[maxable.getNumParameters()];
double[] empiricalGradient = new double[maxable.getNumParameters()];
maxable.getParameters (parameters);
maxable.getValueGradient (analyticGradient);
// the gradient calculate from the slope of the value
maxable.getValueGradient (empiricalGradient);
// This setting of epsilon should make the individual elements of
// the analytical gradient and the empirical gradient equal. This
// simplifies the comparison of the individual dimensions of the
// gradient and thus makes debugging easier.
// cas: However, avoid huge epsilon if norm of analytic gradient is
// close to 0.
// Next line used to be: double norm = Math.max (0.1, MatrixOps.twoNorm(analyticGradient));
// but if all the components of the analyticalGradient are very small, the squaring in the
// twoNorm causes epsilon to be too large. -AKM
double norm = Math.max (0.1, MatrixOps.absNorm(analyticGradient));
double epsilon = 0.1 / norm;
double tolerance = epsilon * 5;
System.out.println ("epsilon = "+epsilon+" tolerance="+tolerance);
int sampleParameterInterval = -1;
if (numComponents > 0) {
sampleParameterInterval = Math.max (1, parameters.length / numComponents);
logger.info ("Will check every "+sampleParameterInterval+"-th component.");
}
// Check each direction, perturb it, measure new value, and make
// sure it agrees with the gradient from
// maxable.getValueGradient()
for (int i = 0; i < parameters.length; i++) {
// { int i = 0; // Uncomment this line to debug one parameter at a time -cas
if ((parameters.length >= sampleParameterInterval) &&
(i % sampleParameterInterval != 0))
continue;
double param = parameters[i];
parameters[i] = param + epsilon;
//logger.fine ("Parameters:"); parameters.print();
maxable.setParameters (parameters);
double epsValue = maxable.getValue();
double slope = (epsValue - value) / epsilon;
System.out.println ("value="+value+" epsValue="+epsValue+" slope["+i+"] = "+slope+" gradient[]="+analyticGradient[i]);
assert (!Double.isNaN (slope));
logger.info ("TestMaximizable checking singleIndex "+i+
": gradient slope = "+analyticGradient[i]+
", value+epsilon slope = "+slope+
": slope difference = "+(slope - analyticGradient[i]));
// No negative below because the gradient points in the direction
// of maximizing the function.
empiricalGradient[i] = slope;
parameters[i] = param;
}
// Normalize the matrices to have the same L2 length
System.out.println ("analyticGradient.twoNorm = "+
MatrixOps.twoNorm(analyticGradient));
System.out.println ("empiricalGradient.twoNorm = "+
MatrixOps.twoNorm(empiricalGradient));
MatrixOps.timesEquals (analyticGradient,
1.0/MatrixOps.twoNorm(analyticGradient));
MatrixOps.timesEquals (empiricalGradient,
1.0/MatrixOps.twoNorm(empiricalGradient));
/*
System.out.println("N ANA EMP");
for (int i = 0; i < analyticGradient.length; i++) {
System.out.println(i+" "+analyticGradient[i]+" "+empiricalGradient[i]);
}
*/
// Return the angle between the two vectors, in radians
double dot = MatrixOps.dotProduct (analyticGradient,empiricalGradient);
if (Maths.almostEquals (dot, 1.0)) {
logger.info ("TestMaximizable angle is zero.");
return 0.0;
} else {
double angle = Math.acos (dot);
logger.info ("TestMaximizable angle = "+angle);
if (Math.abs(angle) > tolerance)
throw new IllegalStateException ("Gradient/Value mismatch: angle="+
angle + " tol: " + tolerance);
if (Double.isNaN (angle))
throw new IllegalStateException ("Gradient/Value error: angle is NaN!");
return angle;
}
}
/**
* Tests that getValue and getValueGradient are consistent.
* Tests for consistency at <tt>params = 0</tt> and at
* <tt> params = -0.0001 * grad(f)</tt>
* @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
* @throws IllegalStateException If the test fails.
*/
public static boolean testValueAndGradient (Optimizable.ByGradientValue maxable)
{
double[] parameters = new double [maxable.getNumParameters()];
MatrixOps.setAll (parameters, 0.0);
maxable.setParameters (parameters);
testValueAndGradientCurrentParameters (maxable);
MatrixOps.setAll (parameters, 0.0);
double[] delta = new double[maxable.getNumParameters()];
maxable.getValueGradient (delta);
logger.info ("Gradient two-Norm = "+MatrixOps.twoNorm(delta));
logger.info (" max parameter change = "+(MatrixOps.infinityNorm(delta) * -0.001));
MatrixOps.timesEquals (delta, -0.0001);
MatrixOps.plusEquals (parameters, delta);
maxable.setParameters (parameters);
testValueAndGradientCurrentParameters (maxable);
return true;
}
/**
* Tests that getValue and getValueGradient are consistent
* at a random parameter setting.
* @see #testValueAndGradientCurrentParameters testValueAndGradientCurrentParameters
* @throws IllegalStateException If the test fails.
*/
public static boolean testValueAndGradientRandomParameters
(Optimizable.ByGradientValue maxable, Random r)
{
double[] params = new double [maxable.getNumParameters()];
for (int i = 0; i < params.length; i++) {
params[i] = r.nextDouble ();
if (r.nextBoolean ())
params [i] = -params[i];
}
maxable.setParameters (params);
testValueAndGradientCurrentParameters (maxable);
return true;
}
	// Test function f(x) = 3x^2 - 5x + 2, with gradient f'(x) = 6x - 5
static class SimplePoly implements Optimizable.ByGradientValue {
double[] params = new double [1];
public void getParameters(double[] doubleArray) {
doubleArray [0] = params [0];
}
public int getNumParameters() { return 1; }
public double getParameter(int n) { return params [0]; };
public void setParameters(double[] doubleArray) {
params [0] = doubleArray [0];
}
public void setParameter(int n, double d) { params[n] = d; }
public double getValue () {
return 3*params[0]*params[0] - 5 * params[0] + 2;
}
public void getValueGradient (double[] buffer)
{
			buffer [0] = 6*params [0] - 5;
}
}
static class WrongSimplePoly extends SimplePoly {
public void getValueGradient (double[] buffer)
{
			buffer [0] = 3*params [0]; // WRONG: should be 6*params[0] - 5
}
}
public void testTestValueAndGradient ()
{
SimplePoly maxable = new SimplePoly ();
testValueAndGradient (maxable);
try {
WrongSimplePoly badMaxable = new WrongSimplePoly ();
testValueAndGradient (badMaxable);
fail ("WrongSimplyPoly should fail testMaxmiziable!");
} catch (Exception e) {}
}
public static Test suite ()
{
return new TestSuite (TestOptimizable.class);
}
protected void setUp ()
{
}
public static void main (String[] args)
{
junit.textui.TestRunner.run (suite());
}
}
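
// Illustrative sketch: how a new Optimizable.ByGradientValue implementation would
// typically be checked with the static helpers above before being handed to an optimizer.
// "objective" is a caller-supplied placeholder.
class TestOptimizableUsageExample {
	static void check (Optimizable.ByGradientValue objective) {
		TestOptimizable.setNumComponents(10); // spot-check only 10 gradient components
		TestOptimizable.testValueAndGradient(objective); // throws if value and gradient disagree
	}
}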
| 12,416 | 35.736686 | 127 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/examples/TestCRFPipe.java | package cc.mallet.examples;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import java.util.zip.*;
import cc.mallet.fst.*;
import cc.mallet.pipe.*;
import cc.mallet.pipe.iterator.*;
import cc.mallet.pipe.tsf.*;
import cc.mallet.types.*;
import cc.mallet.util.*;
public class TestCRFPipe {
public TestCRFPipe(String trainingFilename) throws IOException {
ArrayList<Pipe> pipes = new ArrayList<Pipe>();
PrintWriter out = new PrintWriter("test.out");
int[][] conjunctions = new int[3][];
conjunctions[0] = new int[] { -1 };
conjunctions[1] = new int[] { 1 };
conjunctions[2] = new int[] { -2, -1 };
pipes.add(new SimpleTaggerSentence2TokenSequence());
//pipes.add(new FeaturesInWindow("PREV-", -1, 1));
//pipes.add(new FeaturesInWindow("NEXT-", 1, 2));
pipes.add(new OffsetConjunctions(conjunctions));
pipes.add(new TokenTextCharSuffix("C1=", 1));
pipes.add(new TokenTextCharSuffix("C2=", 2));
pipes.add(new TokenTextCharSuffix("C3=", 3));
pipes.add(new RegexMatches("CAPITALIZED", Pattern.compile("^\\p{Lu}.*")));
pipes.add(new RegexMatches("STARTSNUMBER", Pattern.compile("^[0-9].*")));
pipes.add(new RegexMatches("HYPHENATED", Pattern.compile(".*\\-.*")));
pipes.add(new RegexMatches("DOLLARSIGN", Pattern.compile("\\$.*")));
pipes.add(new TokenFirstPosition("FIRSTTOKEN"));
pipes.add(new TokenSequence2FeatureVectorSequence());
pipes.add(new SequencePrintingPipe(out));
Pipe pipe = new SerialPipes(pipes);
InstanceList trainingInstances = new InstanceList(pipe);
trainingInstances.addThruPipe(new LineGroupIterator(new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(trainingFilename)))), Pattern.compile("^\\s*$"), true));
out.close();
}
public static void main (String[] args) throws Exception {
TestCRFPipe trainer = new TestCRFPipe(args[0]);
}
} | 1,871 | 31.275862 | 191 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/examples/TrainCRF.java | package cc.mallet.examples;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import java.util.zip.*;
import cc.mallet.fst.*;
import cc.mallet.pipe.*;
import cc.mallet.pipe.iterator.*;
import cc.mallet.pipe.tsf.*;
import cc.mallet.types.*;
import cc.mallet.util.*;
public class TrainCRF {
public TrainCRF(String trainingFilename, String testingFilename) throws IOException {
ArrayList<Pipe> pipes = new ArrayList<Pipe>();
int[][] conjunctions = new int[2][];
conjunctions[0] = new int[] { -1 };
conjunctions[1] = new int[] { 1 };
pipes.add(new SimpleTaggerSentence2TokenSequence());
pipes.add(new OffsetConjunctions(conjunctions));
//pipes.add(new FeaturesInWindow("PREV-", -1, 1));
pipes.add(new TokenTextCharSuffix("C1=", 1));
pipes.add(new TokenTextCharSuffix("C2=", 2));
pipes.add(new TokenTextCharSuffix("C3=", 3));
pipes.add(new RegexMatches("CAPITALIZED", Pattern.compile("^\\p{Lu}.*")));
pipes.add(new RegexMatches("STARTSNUMBER", Pattern.compile("^[0-9].*")));
pipes.add(new RegexMatches("HYPHENATED", Pattern.compile(".*\\-.*")));
pipes.add(new RegexMatches("DOLLARSIGN", Pattern.compile(".*\\$.*")));
pipes.add(new TokenFirstPosition("FIRSTTOKEN"));
pipes.add(new TokenSequence2FeatureVectorSequence());
Pipe pipe = new SerialPipes(pipes);
InstanceList trainingInstances = new InstanceList(pipe);
InstanceList testingInstances = new InstanceList(pipe);
trainingInstances.addThruPipe(new LineGroupIterator(new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(trainingFilename)))), Pattern.compile("^\\s*$"), true));
testingInstances.addThruPipe(new LineGroupIterator(new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(testingFilename)))), Pattern.compile("^\\s*$"), true));
CRF crf = new CRF(pipe, null);
//crf.addStatesForLabelsConnectedAsIn(trainingInstances);
crf.addStatesForThreeQuarterLabelsConnectedAsIn(trainingInstances);
crf.addStartState();
CRFTrainerByLabelLikelihood trainer =
new CRFTrainerByLabelLikelihood(crf);
trainer.setGaussianPriorVariance(10.0);
//CRFTrainerByStochasticGradient trainer =
//new CRFTrainerByStochasticGradient(crf, 1.0);
//CRFTrainerByL1LabelLikelihood trainer =
// new CRFTrainerByL1LabelLikelihood(crf, 0.75);
//trainer.addEvaluator(new PerClassAccuracyEvaluator(trainingInstances, "training"));
trainer.addEvaluator(new PerClassAccuracyEvaluator(testingInstances, "testing"));
trainer.addEvaluator(new TokenAccuracyEvaluator(testingInstances, "testing"));
trainer.train(trainingInstances);
}
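	// Usage sketch (assumed from the pipes above): both arguments are gzipped files in
	// SimpleTagger format -- one token per line, whitespace-separated features with the
	// label as the last field, and a blank line between sentences. For example:
	//   java cc.mallet.examples.TrainCRF train.txt.gz test.txt.gz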
public static void main (String[] args) throws Exception {
TrainCRF trainer = new TrainCRF(args[0], args[1]);
}
} | 2,736 | 36.493151 | 191 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/examples/TrainHMM.java | package cc.mallet.examples;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import java.util.zip.*;
import cc.mallet.fst.*;
import cc.mallet.pipe.*;
import cc.mallet.pipe.iterator.*;
import cc.mallet.pipe.tsf.*;
import cc.mallet.types.*;
import cc.mallet.util.*;
public class TrainHMM {
public TrainHMM(String trainingFilename, String testingFilename) throws IOException {
ArrayList<Pipe> pipes = new ArrayList<Pipe>();
pipes.add(new SimpleTaggerSentence2TokenSequence());
pipes.add(new TokenSequence2FeatureSequence());
Pipe pipe = new SerialPipes(pipes);
InstanceList trainingInstances = new InstanceList(pipe);
InstanceList testingInstances = new InstanceList(pipe);
trainingInstances.addThruPipe(new LineGroupIterator(new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(trainingFilename)))), Pattern.compile("^\\s*$"), true));
testingInstances.addThruPipe(new LineGroupIterator(new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(testingFilename)))), Pattern.compile("^\\s*$"), true));
HMM hmm = new HMM(pipe, null);
hmm.addStatesForLabelsConnectedAsIn(trainingInstances);
//hmm.addStatesForBiLabelsConnectedAsIn(trainingInstances);
HMMTrainerByLikelihood trainer =
new HMMTrainerByLikelihood(hmm);
TransducerEvaluator trainingEvaluator =
new PerClassAccuracyEvaluator(trainingInstances, "training");
TransducerEvaluator testingEvaluator =
new PerClassAccuracyEvaluator(testingInstances, "testing");
trainer.train(trainingInstances, 10);
trainingEvaluator.evaluate(trainer);
testingEvaluator.evaluate(trainer);
}
public static void main (String[] args) throws Exception {
TrainHMM trainer = new TrainHMM(args[0], args[1]);
}
} | 1,778 | 32.566038 | 191 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/ClassifierEnsemble.java | package cc.mallet.classify;
import cc.mallet.types.Instance;
import cc.mallet.types.LabelVector;
import cc.mallet.types.MatrixOps;
/* Copyright (C) 2005 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
 * Classifier for an ensemble of classifiers, combined with learned weights.
* The procedure is to obtain the score from each classifier (typically p(y|x)),
* perform the weighted sum of these scores, then exponentiate the summed
* score for each class, and re-normalize the resulting per-class scores.
* In other words, the scores of the ensemble classifiers are treated as
* input features in a Maximum Entropy classifier.
* @author <a href="mailto:[email protected]">Andrew McCallum</a>
*/
public class ClassifierEnsemble extends Classifier
{
Classifier[] ensemble;
double[] weights;
public ClassifierEnsemble (Classifier[] classifiers, double[] weights)
{
this.ensemble = new Classifier[classifiers.length];
for (int i = 0; i < classifiers.length; i++) {
			if (i > 0 && ensemble[i-1].getLabelAlphabet() != classifiers[i].getLabelAlphabet())
				throw new IllegalStateException("LabelAlphabets do not match.");
			ensemble[i] = classifiers[i];
		}
this.weights = (double[]) weights.clone();
}
public Classification classify (Instance instance)
{
int numLabels = ensemble[0].getLabelAlphabet().size();
double[] scores = new double[numLabels];
// Run each classifier on the instance, summing each one's per-class score, with a weight
for (int i = 0; i < ensemble.length; i++) {
Classification c = ensemble[i].classify(instance);
c.getLabelVector().addTo(scores, weights[i]);
}
// Exponentiate and normalize scores
expNormalize (scores);
return new Classification (instance, this, new LabelVector (ensemble[0].getLabelAlphabet(), scores));
}
private static void expNormalize (double[] a)
{
double max = MatrixOps.max (a);
double sum = 0;
for (int i = 0; i < a.length; i++) {
assert(!Double.isNaN(a[i]));
a[i] = Math.exp (a[i] - max);
sum += a[i];
}
for (int i = 0; i < a.length; i++) {
a[i] /= sum;
}
}
}
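
// Illustrative usage sketch: combining two previously trained classifiers with
// hand-chosen weights. "first" and "second" are caller-supplied placeholders assumed to
// share the same LabelAlphabet, as the constructor above requires.
class ClassifierEnsembleExample {
	static Classification classifyWithEnsemble (Classifier first, Classifier second, Instance instance) {
		ClassifierEnsemble ensemble = new ClassifierEnsemble(
				new Classifier[] { first, second }, new double[] { 2.0, 1.0 }); // weight first 2:1
		return ensemble.classify(instance);
	}
}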
| 2,586 | 36.492754 | 105 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/MaxEntOptimizableByLabelDistribution.java | package cc.mallet.classify;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.logging.Logger;
import cc.mallet.optimize.LimitedMemoryBFGS;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.Alphabet;
import cc.mallet.types.FeatureSelection;
import cc.mallet.types.FeatureVector;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import cc.mallet.types.LabelAlphabet;
import cc.mallet.types.Labeling;
import cc.mallet.types.MatrixOps;
import cc.mallet.util.MalletLogger;
import cc.mallet.util.MalletProgressMessageLogger;
import cc.mallet.util.Maths;
public class MaxEntOptimizableByLabelDistribution implements Optimizable.ByGradientValue //, Serializable TODO needs to be done?
{
private static Logger logger = MalletLogger.getLogger(MaxEntOptimizableByLabelDistribution.class.getName());
private static Logger progressLogger = MalletProgressMessageLogger.getLogger(MaxEntOptimizableByLabelDistribution.class.getName()+"-pl");
// xxx Why does TestMaximizable fail when this variance is very small?
//static final double DEFAULT_GAUSSIAN_PRIOR_VARIANCE = 1;
static final double DEFAULT_GAUSSIAN_PRIOR_VARIANCE = 1.0;
static final Class DEFAULT_MAXIMIZER_CLASS = LimitedMemoryBFGS.class;
double gaussianPriorVariance = DEFAULT_GAUSSIAN_PRIOR_VARIANCE;
Class maximizerClass = DEFAULT_MAXIMIZER_CLASS;
double[] parameters, constraints, cachedGradient;
MaxEnt theClassifier;
InstanceList trainingList;
// The expectations are (temporarily) stored in the cachedGradient
double cachedValue;
boolean cachedValueStale;
boolean cachedGradientStale;
int numLabels;
int numFeatures;
int defaultFeatureIndex; // just for clarity
FeatureSelection featureSelection;
FeatureSelection[] perLabelFeatureSelection;
int numGetValueCalls = 0;
int numGetValueGradientCalls = 0;
public MaxEntOptimizableByLabelDistribution() {
}
public MaxEntOptimizableByLabelDistribution (InstanceList trainingSet, MaxEnt initialClassifier)
{
this.trainingList = trainingSet;
Alphabet fd = trainingSet.getDataAlphabet();
LabelAlphabet ld = (LabelAlphabet) trainingSet.getTargetAlphabet();
// Don't fd.stopGrowth, because someone might want to do feature induction
ld.stopGrowth();
// Add one feature for the "default feature".
this.numLabels = ld.size();
this.numFeatures = fd.size() + 1;
this.defaultFeatureIndex = numFeatures-1;
this.parameters = new double [numLabels * numFeatures];
this.constraints = new double [numLabels * numFeatures];
this.cachedGradient = new double [numLabels * numFeatures];
Arrays.fill (parameters, 0.0);
Arrays.fill (constraints, 0.0);
Arrays.fill (cachedGradient, 0.0);
this.featureSelection = trainingSet.getFeatureSelection();
this.perLabelFeatureSelection = trainingSet.getPerLabelFeatureSelection();
// Add the default feature index to the selection
if (featureSelection != null)
featureSelection.add (defaultFeatureIndex);
if (perLabelFeatureSelection != null)
for (int i = 0; i < perLabelFeatureSelection.length; i++)
perLabelFeatureSelection[i].add (defaultFeatureIndex);
// xxx Later change this to allow both to be set, but select which one to use by a boolean flag?
assert (featureSelection == null || perLabelFeatureSelection == null);
if (initialClassifier != null) {
this.theClassifier = initialClassifier;
this.parameters = theClassifier.parameters;
this.featureSelection = theClassifier.featureSelection;
this.perLabelFeatureSelection = theClassifier.perClassFeatureSelection;
this.defaultFeatureIndex = theClassifier.defaultFeatureIndex;
assert (initialClassifier.getInstancePipe() == trainingSet.getPipe());
}
else if (this.theClassifier == null) {
this.theClassifier = new MaxEnt (trainingSet.getPipe(), parameters, featureSelection, perLabelFeatureSelection);
}
cachedValueStale = true;
cachedGradientStale = true;
// Initialize the constraints
logger.fine("Number of instances in training list = " + trainingList.size());
for (Instance inst : trainingList) {
double instanceWeight = trainingList.getInstanceWeight(inst);
Labeling labeling = inst.getLabeling ();
if (labeling == null)
continue;
//logger.fine ("Instance "+ii+" labeling="+labeling);
FeatureVector fv = (FeatureVector) inst.getData ();
Alphabet fdict = fv.getAlphabet();
assert (fv.getAlphabet() == fd);
// Here is the difference between this code and the single label
// version: rather than only picking out the "best" index,
// loop over all label indices.
for (int pos = 0; pos < labeling.numLocations(); pos++){
MatrixOps.rowPlusEquals (constraints, numFeatures,
labeling.indexAtLocation(pos),
fv,
instanceWeight*labeling.valueAtLocation(pos));
}
assert(!Double.isNaN(instanceWeight)) : "instanceWeight is NaN";
boolean hasNaN = false;
for (int i = 0; i < fv.numLocations(); i++) {
if (Double.isNaN(fv.valueAtLocation(i))) {
logger.info("NaN for feature " + fdict.lookupObject(fv.indexAtLocation(i)).toString());
hasNaN = true;
}
}
if (hasNaN)
logger.info("NaN in instance: " + inst.getName());
// For the default feature, whose weight is 1.0
for (int pos = 0; pos < labeling.numLocations(); pos++) {
constraints[labeling.indexAtLocation(pos)*numFeatures + defaultFeatureIndex] +=
1.0 * instanceWeight * labeling.value(labeling.indexAtLocation(pos));
}
}
}
public MaxEnt getClassifier () { return theClassifier; }
public double getParameter (int index) {
return parameters[index];
}
public void setParameter (int index, double v) {
cachedValueStale = true;
cachedGradientStale = true;
parameters[index] = v;
}
public int getNumParameters() {
return parameters.length;
}
public void getParameters (double[] buff) {
if (buff == null || buff.length != parameters.length)
buff = new double [parameters.length];
System.arraycopy (parameters, 0, buff, 0, parameters.length);
}
public void setParameters (double [] buff) {
assert (buff != null);
cachedValueStale = true;
cachedGradientStale = true;
if (buff.length != parameters.length)
parameters = new double[buff.length];
System.arraycopy (buff, 0, parameters, 0, buff.length);
}
/** Return the log probability of the training label distributions */
public double getValue () {
if (cachedValueStale) {
numGetValueCalls++;
cachedValue = 0;
// We'll store the expectation values in "cachedGradient" for now
cachedGradientStale = true;
MatrixOps.setAll (cachedGradient, 0.0);
// Incorporate likelihood of data
double[] scores = new double[trainingList.getTargetAlphabet().size()];
double value = 0.0;
Iterator<Instance> iter = trainingList.iterator();
int ii=0;
while (iter.hasNext()) {
ii++;
Instance instance = iter.next();
double instanceWeight = trainingList.getInstanceWeight(instance);
Labeling labeling = instance.getLabeling ();
if (labeling == null)
continue;
//System.out.println("L Now "+inputAlphabet.size()+" regular features.");
this.theClassifier.getClassificationScores (instance, scores);
FeatureVector fv = (FeatureVector) instance.getData ();
value = 0.0;
for(int pos = 0; pos < labeling.numLocations(); pos++) { //loop, added by Limin Yao
int ll = labeling.indexAtLocation(pos);
value -= (instanceWeight * labeling.valueAtLocation(pos) * Math.log (scores[ll]));
}
if (Double.isNaN(value)) {
logger.fine ("MaxEntOptimizableByLabelDistribution: Instance " + instance.getName() +
"has NaN value.");
}
if (Double.isInfinite(value)) {
logger.warning ("Instance "+instance.getSource() + " has infinite value; skipping value and gradient");
cachedValue -= value;
cachedValueStale = false;
return -value;
// continue;
}
cachedValue += value;
//The model expectation? added by Limin Yao
for (int si = 0; si < scores.length; si++) {
if (scores[si] == 0) continue;
assert (!Double.isInfinite(scores[si]));
MatrixOps.rowPlusEquals (cachedGradient, numFeatures,
si, fv, -instanceWeight * scores[si]);
cachedGradient[numFeatures*si + defaultFeatureIndex] += (-instanceWeight * scores[si]);
}
}
//logger.info ("-Expectations:"); cachedGradient.print();
// Incorporate prior on parameters
double prior = 0;
for (int li = 0; li < numLabels; li++) {
for (int fi = 0; fi < numFeatures; fi++) {
double param = parameters[li*numFeatures + fi];
prior += param * param / (2 * gaussianPriorVariance);
}
}
double oValue = cachedValue;
cachedValue += prior;
cachedValue *= -1.0; // MAXIMIZE, NOT MINIMIZE
cachedValueStale = false;
progressLogger.info ("Value (labelProb="+oValue+" prior="+prior+") loglikelihood = "+cachedValue);
}
return cachedValue;
}
public void getValueGradient (double [] buffer)
{
// Gradient is (constraint - expectation - parameters/gaussianPriorVariance)
if (cachedGradientStale) {
numGetValueGradientCalls++;
if (cachedValueStale)
// This will fill in the cachedGradient with the "-expectation"
getValue ();
MatrixOps.plusEquals (cachedGradient, constraints);
// Incorporate prior on parameters
MatrixOps.plusEquals (cachedGradient, parameters,
-1.0 / gaussianPriorVariance);
// A parameter may be set to -infinity by an external user.
// We set gradient to 0 because the parameter's value can
// never change anyway and it will mess up future calculations
// on the matrix, such as norm().
MatrixOps.substitute (cachedGradient, Double.NEGATIVE_INFINITY, 0.0);
// Set to zero all the gradient dimensions that are not among the selected features
if (perLabelFeatureSelection == null) {
for (int labelIndex = 0; labelIndex < numLabels; labelIndex++)
MatrixOps.rowSetAll (cachedGradient, numFeatures,
labelIndex, 0.0, featureSelection, false);
} else {
for (int labelIndex = 0; labelIndex < numLabels; labelIndex++)
MatrixOps.rowSetAll (cachedGradient, numFeatures,
labelIndex, 0.0,
perLabelFeatureSelection[labelIndex], false);
}
cachedGradientStale = false;
}
assert (buffer != null && buffer.length == parameters.length);
System.arraycopy (cachedGradient, 0, buffer, 0, cachedGradient.length);
//System.out.println ("MaxEntTrainer gradient infinity norm = "+MatrixOps.infinityNorm(cachedGradient));
}
// XXX Should these really be public? Why?
/** Counts how many times this trainer has computed the gradient of the
* log probability of training labels. */
public int getValueGradientCalls() {return numGetValueGradientCalls;}
/** Counts how many times this trainer has computed the
* log probability of training labels. */
public int getValueCalls() {return numGetValueCalls;}
// public int getIterations() {return maximizerByGradient.getIterations();}
public MaxEntOptimizableByLabelDistribution useGaussianPrior () {
return this;
}
/**
* Sets a parameter to prevent overtraining. A smaller variance for the prior
* means that feature weights are expected to hover closer to 0, so extra
* evidence is required to set a higher weight.
* @return This trainer
*/
public MaxEntOptimizableByLabelDistribution setGaussianPriorVariance (double gaussianPriorVariance)
{
this.gaussianPriorVariance = gaussianPriorVariance;
return this;
}
}
| 11,537 | 36.099678 | 138 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/MaxEntTrainer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
import java.util.logging.*;
import java.util.*;
import java.io.*;
import cc.mallet.classify.Classifier;
import cc.mallet.optimize.ConjugateGradient;
import cc.mallet.optimize.InvalidOptimizableException;
import cc.mallet.optimize.LimitedMemoryBFGS;
import cc.mallet.optimize.Optimizable;
import cc.mallet.optimize.OptimizationException;
import cc.mallet.optimize.Optimizer;
import cc.mallet.optimize.OrthantWiseLimitedMemoryBFGS;
import cc.mallet.optimize.tests.*;
import cc.mallet.pipe.Pipe;
import cc.mallet.types.Alphabet;
import cc.mallet.types.ExpGain;
import cc.mallet.types.FeatureInducer;
import cc.mallet.types.FeatureSelection;
import cc.mallet.types.FeatureVector;
import cc.mallet.types.GradientGain;
import cc.mallet.types.InfoGain;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import cc.mallet.types.Label;
import cc.mallet.types.LabelAlphabet;
import cc.mallet.types.LabelVector;
import cc.mallet.types.Labeling;
import cc.mallet.types.MatrixOps;
import cc.mallet.types.RankedFeatureVector;
import cc.mallet.types.Vector;
import cc.mallet.util.CommandOption;
import cc.mallet.util.MalletLogger;
import cc.mallet.util.MalletProgressMessageLogger;
import cc.mallet.util.Maths;
//Does not currently handle instances that are labeled with distributions
//instead of a single label.
/**
* The trainer for a Maximum Entropy classifier.
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
public class MaxEntTrainer extends ClassifierTrainer<MaxEnt>
implements ClassifierTrainer.ByOptimization<MaxEnt>, Boostable, Serializable {
private static Logger logger = MalletLogger.getLogger(MaxEntTrainer.class.getName());
private static Logger progressLogger = MalletProgressMessageLogger.getLogger(MaxEntTrainer.class.getName()+"-pl");
int numIterations = Integer.MAX_VALUE;
public static final String EXP_GAIN = "exp";
public static final String GRADIENT_GAIN = "grad";
public static final String INFORMATION_GAIN = "info";
// xxx Why does TestMaximizable fail when this variance is very small?
static final double DEFAULT_GAUSSIAN_PRIOR_VARIANCE = 1;
static final double DEFAULT_L1_WEIGHT = 0.0;
static final Class DEFAULT_MAXIMIZER_CLASS = LimitedMemoryBFGS.class;
double gaussianPriorVariance = DEFAULT_GAUSSIAN_PRIOR_VARIANCE;
double l1Weight = DEFAULT_L1_WEIGHT;
Class maximizerClass = DEFAULT_MAXIMIZER_CLASS;
InstanceList trainingSet = null;
MaxEnt initialClassifier;
MaxEntOptimizableByLabelLikelihood optimizable = null;
Optimizer optimizer = null;
//
// CONSTRUCTORS
//
public MaxEntTrainer () {}
/** Construct a MaxEnt trainer using a trained classifier as
* initial values.
*/
public MaxEntTrainer (MaxEnt theClassifierToTrain) {
this.initialClassifier = theClassifierToTrain;
}
/** Constructs a trainer with a Gaussian prior variance parameter to prevent overtraining.
 * 1.0 is the default value. */
public MaxEntTrainer (double gaussianPriorVariance) {
this.gaussianPriorVariance = gaussianPriorVariance;
}
//
// CLASSIFIER OBJECT: stores parameters
//
public MaxEnt getClassifier () {
if (optimizable != null)
return optimizable.getClassifier();
return initialClassifier;
}
/**
* Initialize parameters using the provided classifier.
*/
public void setClassifier (MaxEnt theClassifierToTrain) {
// Is this necessary? What if the caller is about to set the training set to something different? -akm
assert (trainingSet == null || Alphabet.alphabetsMatch(theClassifierToTrain, trainingSet));
if (this.initialClassifier != theClassifierToTrain) {
this.initialClassifier = theClassifierToTrain;
optimizable = null;
optimizer = null;
}
}
//
// OPTIMIZABLE OBJECT: implements value and gradient functions
//
public Optimizable getOptimizable () {
return optimizable;
}
public MaxEntOptimizableByLabelLikelihood getOptimizable (InstanceList trainingSet) {
return getOptimizable(trainingSet, getClassifier());
}
public MaxEntOptimizableByLabelLikelihood getOptimizable (InstanceList trainingSet, MaxEnt initialClassifier) {
if (trainingSet != this.trainingSet || this.initialClassifier != initialClassifier) {
this.trainingSet = trainingSet;
this.initialClassifier = initialClassifier;
if (optimizable == null || optimizable.trainingList != trainingSet) {
optimizable = new MaxEntOptimizableByLabelLikelihood (trainingSet, initialClassifier);
if (l1Weight == 0.0) {
optimizable.setGaussianPriorVariance(gaussianPriorVariance);
}
else {
// the prior term for L1-regularized classifiers
// is implemented as part of the optimizer,
// so don't include a prior calculation in the value and
// gradient functions.
optimizable.useNoPrior();
}
optimizer = null;
}
}
return optimizable;
}
//
// OPTIMIZER OBJECT: maximizes value function
//
public Optimizer getOptimizer () {
if (optimizer == null && optimizable != null) {
optimizer = new ConjugateGradient(optimizable);
}
return optimizer;
}
/** This method is called by the train method.
* This is the main entry point for the optimizable and optimizer
 * components.
*/
public Optimizer getOptimizer (InstanceList trainingSet) {
// If the data is not set, or has changed,
// initialize the optimizable object and
// replace the optimizer.
if (trainingSet != this.trainingSet ||
optimizable == null) {
getOptimizable(trainingSet);
optimizer = null;
}
// Build a new optimizer
if (optimizer == null) {
// If l1Weight is 0, this devolves to
// standard L-BFGS, but the implementation
// may be faster.
optimizer = new LimitedMemoryBFGS(optimizable);
//OrthantWiseLimitedMemoryBFGS(optimizable, l1Weight);
}
return optimizer;
}
/**
* Specifies the maximum number of iterations to run during a single call
* to <code>train</code> or <code>trainWithFeatureInduction</code>. Not
* currently functional.
* @return This trainer
*/
// XXX Since we maximize before using numIterations, this doesn't work.
// Is that a bug? If so, should the default numIterations be higher?
public MaxEntTrainer setNumIterations (int i) {
numIterations = i;
return this;
}
public int getIteration () {
if (optimizable == null)
return 0;
else
return Integer.MAX_VALUE;
// return optimizer.getIteration ();
}
/**
* Sets a parameter to prevent overtraining. A smaller variance for the prior
* means that feature weights are expected to hover closer to 0, so extra
* evidence is required to set a higher weight.
* @return This trainer
*/
public MaxEntTrainer setGaussianPriorVariance (double gaussianPriorVariance) {
this.gaussianPriorVariance = gaussianPriorVariance;
return this;
}
/**
* Use an L1 prior. Larger values mean parameters will be closer to 0.
* Note that this setting overrides any Gaussian prior.
*/
public MaxEntTrainer setL1Weight(double l1Weight) {
this.l1Weight = l1Weight;
return this;
}
public MaxEnt train (InstanceList trainingSet) {
return train (trainingSet, numIterations);
}
public MaxEnt train (InstanceList trainingSet, int numIterations)
{
logger.fine ("trainingSet.size() = "+trainingSet.size());
getOptimizer (trainingSet); // This will set this.optimizer, this.optimizable
for (int i = 0; i < numIterations; i++) {
try {
finishedTraining = optimizer.optimize (1);
} catch (InvalidOptimizableException e) {
e.printStackTrace();
logger.warning("Catching InvalidOptimizatinException! saying converged.");
finishedTraining = true;
} catch (OptimizationException e) {
e.printStackTrace();
logger.info ("Catching OptimizationException; saying converged.");
finishedTraining = true;
}
if (finishedTraining)
break;
}
// only if any number of iterations is allowed
if (numIterations == Integer.MAX_VALUE) {
// Run it again because in our and Sam Roweis' experience, BFGS can still
// eke out more likelihood after first convergence by re-running without
// being restricted by its gradient history.
optimizer = null;
getOptimizer(trainingSet);
try {
finishedTraining = optimizer.optimize ();
} catch (InvalidOptimizableException e) {
e.printStackTrace();
logger.warning("Catching InvalidOptimizatinException! saying converged.");
finishedTraining = true;
} catch (OptimizationException e) {
e.printStackTrace();
logger.info ("Catching OptimizationException; saying converged.");
finishedTraining = true;
}
}
//TestMaximizable.testValueAndGradientCurrentParameters (mt);
progressLogger.info("\n"); // progress messages are on one line; move on.
//logger.info("MaxEnt ngetValueCalls:"+getValueCalls()+"\nMaxEnt ngetValueGradientCalls:"+getValueGradientCalls());
return optimizable.getClassifier();
}
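// Minimal usage sketch (illustrative only; "training" stands for an InstanceList built elsewhere,
// whose instances carry FeatureVector data and Label targets):
//
//   MaxEntTrainer trainer = new MaxEntTrainer()
//       .setGaussianPriorVariance(10.0)   // or .setL1Weight(1.0) to use an L1 prior instead
//       .setNumIterations(100);
//   MaxEnt maxent = trainer.train(training);
//   Classification c = maxent.classify(training.get(0));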
/**
* <p>Trains a maximum entropy model using feature selection and feature induction
* (adding conjunctions of features as new features).</p>
*
* @param trainingData A list of <code>Instance</code>s whose <code>data</code>
* fields are binary, augmentable <code>FeatureVector</code>s.
* and whose <code>target</code> fields are <code>Label</code>s.
* @param validationData [not currently used] As <code>trainingData</code>,
* or <code>null</code>.
* @param testingData As <code>trainingData</code>, or <code>null</code>.
* @param evaluator The evaluator to track training progress and decide whether
* to continue, or <code>null</code>.
* @param totalIterations The maximum total number of training iterations,
* including those taken during feature induction.
* @param numIterationsBetweenFeatureInductions How many iterations to train
* between one round of feature induction and the next; this should usually
* be fairly small, like 5 or 10, to avoid overfitting with current features.
* @param numFeatureInductions How many rounds of feature induction to run
* before beginning normal training.
* @param numFeaturesPerFeatureInduction The maximum number of features to
* choose during each round of featureInduction.
*
* @return The trained <code>MaxEnt</code> classifier
*/
/*
// added - [email protected]
public Classifier trainWithFeatureInduction (InstanceList trainingData,
int totalIterations,
int numIterationsBetweenFeatureInductions,
int numFeatureInductions,
int numFeaturesPerFeatureInduction) {
return trainWithFeatureInduction (trainingData,
null,
totalIterations,
numIterationsBetweenFeatureInductions,
numFeatureInductions,
numFeaturesPerFeatureInduction,
EXP_GAIN);
}
*/
/**
* <p>Like the other version of <code>trainWithFeatureInduction</code>, but
* allows some default options to be changed.</p>
*
* @param maxent An initial partially-trained classifier (default <code>null</code>).
* This classifier may be modified during training.
* @param gainName The estimate of gain (log-likelihood increase) we want our chosen
* features to maximize.
* Should be one of <code>MaxEntTrainer.EXP_GAIN</code>,
* <code>MaxEntTrainer.GRADIENT_GAIN</code>, or
* <code>MaxEntTrainer.INFORMATION_GAIN</code> (default <code>EXP_GAIN</code>).
*
* @return The trained <code>MaxEnt</code> classifier
*/
/* // Temporarily removed until I figure out how to handle induceFeaturesFor (testData)
public Classifier trainWithFeatureInduction (InstanceList trainingData,
int totalIterations,
int numIterationsBetweenFeatureInductions,
int numFeatureInductions,
int numFeaturesPerFeatureInduction,
String gainName) {
// XXX This ought to be a parameter, except that setting it to true can
// crash training ("Jump too small").
boolean saveParametersDuringFI = false;
Alphabet inputAlphabet = trainingData.getDataAlphabet();
Alphabet outputAlphabet = trainingData.getTargetAlphabet();
int trainingIteration = 0;
int numLabels = outputAlphabet.size();
MaxEnt maxent = getClassifier();
// Initialize feature selection
FeatureSelection globalFS = trainingData.getFeatureSelection();
if (globalFS == null) {
// Mask out all features; some will be added later by FeatureInducer.induceFeaturesFor(.)
globalFS = new FeatureSelection (trainingData.getDataAlphabet());
trainingData.setFeatureSelection (globalFS);
}
//if (validationData != null) validationData.setFeatureSelection (globalFS);
//if (testingData != null) testingData.setFeatureSelection (globalFS);
getOptimizer(trainingData); // This will initialize this.me so getClassifier() below works
maxent.setFeatureSelection(globalFS);
// Run feature induction
for (int featureInductionIteration = 0; featureInductionIteration < numFeatureInductions; featureInductionIteration++) {
// Print out some feature information
logger.info ("Feature induction iteration "+featureInductionIteration);
// Train the model a little bit. We don't care whether it converges; we
// execute all feature induction iterations no matter what.
if (featureInductionIteration != 0) {
// Don't train until we have added some features
setNumIterations(numIterationsBetweenFeatureInductions);
train (trainingData);
}
trainingIteration += numIterationsBetweenFeatureInductions;
logger.info ("Starting feature induction with "+(1+inputAlphabet.size())+
" features over "+numLabels+" labels.");
// Create the list of error tokens
InstanceList errorInstances = new InstanceList (trainingData.getDataAlphabet(),
trainingData.getTargetAlphabet());
// This errorInstances.featureSelection will get examined by FeatureInducer,
// so it can know how to add "new" singleton features
errorInstances.setFeatureSelection (globalFS);
List errorLabelVectors = new ArrayList(); // these are length-1 vectors
for (int i = 0; i < trainingData.size(); i++) {
Instance instance = trainingData.get(i);
FeatureVector inputVector = (FeatureVector) instance.getData();
Label trueLabel = (Label) instance.getTarget();
// Having trained using just the current features, see how we classify
// the training data now.
Classification classification = maxent.classify(instance);
if (!classification.bestLabelIsCorrect()) {
errorInstances.add(inputVector, trueLabel, null, null);
errorLabelVectors.add(classification.getLabelVector());
}
}
logger.info ("Error instance list size = "+errorInstances.size());
int s = errorLabelVectors.size();
LabelVector[] lvs = new LabelVector[s];
for (int i = 0; i < s; i++) {
lvs[i] = (LabelVector)errorLabelVectors.get(i);
}
RankedFeatureVector.Factory gainFactory = null;
if (gainName.equals (EXP_GAIN))
gainFactory = new ExpGain.Factory (lvs, gaussianPriorVariance);
else if (gainName.equals(GRADIENT_GAIN))
gainFactory = new GradientGain.Factory (lvs);
else if (gainName.equals(INFORMATION_GAIN))
gainFactory = new InfoGain.Factory ();
else
throw new IllegalArgumentException("Unsupported gain name: "+gainName);
FeatureInducer klfi =
new FeatureInducer (gainFactory,
errorInstances,
numFeaturesPerFeatureInduction,
2*numFeaturesPerFeatureInduction,
2*numFeaturesPerFeatureInduction);
// Note that this adds features globally, but not on a per-transition basis
klfi.induceFeaturesFor (trainingData, false, false);
if (testingData != null) klfi.induceFeaturesFor (testingData, false, false);
logger.info ("MaxEnt FeatureSelection now includes "+globalFS.cardinality()+" features");
klfi = null;
double[] newParameters = new double[(1+inputAlphabet.size()) * outputAlphabet.size()];
// XXX (Executing this block often causes an error during training; I don't know why.)
if (saveParametersDuringFI) {
// Keep current parameter values
// XXX This relies on the implementation detail that the most recent features
// added to an Alphabet get the highest indices.
// Count parameters per output label
int oldParamCount = maxent.parameters.length / outputAlphabet.size();
int newParamCount = 1+inputAlphabet.size();
// Copy params into the proper locations
for (int i=0; i<outputAlphabet.size(); i++) {
System.arraycopy(maxent.parameters, i*oldParamCount,
newParameters, i*newParamCount,
oldParamCount);
}
for (int i=0; i<oldParamCount; i++)
if (maxent.parameters[i] != newParameters[i]) {
System.out.println(maxent.parameters[i]+" "+newParameters[i]);
System.exit(0);
}
}
maxent.parameters = newParameters;
maxent.defaultFeatureIndex = inputAlphabet.size();
}
// Finished feature induction
logger.info("Ended with "+globalFS.cardinality()+" features.");
setNumIterations(totalIterations - trainingIteration);
train (trainingData);
return maxent;
}
*/
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("MaxEntTrainer");
if (numIterations < Integer.MAX_VALUE) {
builder.append(",numIterations=" + numIterations);
}
if (l1Weight != 0.0) {
builder.append(",l1Weight=" + l1Weight);
}
else {
builder.append(",gaussianPriorVariance=" + gaussianPriorVariance);
}
return builder.toString();
}
}
| 17,756 | 34.443114 | 122 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/Boostable.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
package cc.mallet.classify;
/** This interface is a tag indicating that the classifier attends to the
InstanceList.getInstanceWeight() weights when training. */
public interface Boostable
{
}
| 719 | 30.304348 | 91 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/FeatureSelectingClassifierTrainer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
import java.io.*;
import java.util.*;
import cc.mallet.classify.Classifier;
import cc.mallet.types.FeatureSelector;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import cc.mallet.util.BshInterpreter;
import cc.mallet.util.CommandOption;
/**
* Adaptor for adding feature selection to a classifier trainer.
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
public class FeatureSelectingClassifierTrainer extends ClassifierTrainer
{
ClassifierTrainer underlyingTrainer;
FeatureSelector featureSelector;
Classifier classifier;
public Classifier getClassifier () { return classifier; }
public FeatureSelectingClassifierTrainer (ClassifierTrainer underlyingTrainer,
FeatureSelector featureSelector)
{
this.underlyingTrainer = underlyingTrainer;
this.featureSelector = featureSelector;
}
public Classifier train (InstanceList trainingSet)
{
featureSelector.selectFeaturesFor (trainingSet);
// TODO What about also selecting features for the validation set?
this.classifier = underlyingTrainer.train (trainingSet);
return classifier;
}
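// Usage sketch (illustrative; "someFeatureSelector" is an assumed, separately constructed
// FeatureSelector, e.g. one built around an InfoGain ranking):
//   FeatureSelectingClassifierTrainer trainer =
//       new FeatureSelectingClassifierTrainer(new MaxEntTrainer(), someFeatureSelector);
//   Classifier classifier = trainer.train(training);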
}
| 1,601 | 30.411765 | 91 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/BaggingTrainer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
import cc.mallet.types.*;
/**
Bagging Trainer.
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
public class BaggingTrainer extends ClassifierTrainer<BaggingClassifier>
{
ClassifierTrainer.Factory underlyingTrainer;
int numBags;
BaggingClassifier classifier;
public BaggingClassifier getClassifier () { return classifier; }
public BaggingTrainer (ClassifierTrainer.Factory underlyingTrainerFactory, int numBags)
{
this.underlyingTrainer = underlyingTrainerFactory;
this.numBags = numBags;
}
public BaggingTrainer (ClassifierTrainer.Factory underlyingTrainerFactory)
{
this (underlyingTrainerFactory, 10);
}
public BaggingClassifier train (InstanceList trainingList)
{
Classifier[] classifiers = new Classifier[numBags];
java.util.Random r = new java.util.Random ();
for (int round = 0; round < numBags; round++) {
InstanceList bag = trainingList.sampleWithReplacement (r, trainingList.size());
classifiers[round] = underlyingTrainer.newClassifierTrainer().train (bag);
}
this.classifier = new BaggingClassifier (trainingList.getPipe(), classifiers);
return classifier;
}
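// Usage sketch (illustrative; "someTrainerFactory" is an assumed ClassifierTrainer.Factory that
// produces a fresh base trainer per bag, and "training" an assumed InstanceList):
//   BaggingTrainer bagger = new BaggingTrainer(someTrainerFactory, 10);
//   BaggingClassifier bagged = bagger.train(training);
// Each of the 10 rounds draws a bootstrap sample of trainingList.size() instances with
// replacement and trains one base classifier on it, exactly as in train() above.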
}
| 1,616 | 30.096154 | 91 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/RankMaxEntTrainer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
//package edu.umass.cs.mallet.users.culotta.cluster.classify;
//import edu.umass.cs.mallet.base.classify.*;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import cc.mallet.optimize.ConjugateGradient;
import cc.mallet.optimize.LimitedMemoryBFGS;
import cc.mallet.optimize.Optimizable;
import cc.mallet.optimize.Optimizer;
import cc.mallet.types.Alphabet;
import cc.mallet.types.ExpGain;
import cc.mallet.types.FeatureInducer;
import cc.mallet.types.FeatureSelection;
import cc.mallet.types.FeatureVector;
import cc.mallet.types.FeatureVectorSequence;
import cc.mallet.types.GradientGain;
import cc.mallet.types.InfoGain;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import cc.mallet.types.Label;
import cc.mallet.types.LabelAlphabet;
import cc.mallet.types.LabelVector;
import cc.mallet.types.Labels;
import cc.mallet.types.MatrixOps;
import cc.mallet.types.RankedFeatureVector;
import cc.mallet.util.CommandOption;
import cc.mallet.util.MalletLogger;
import cc.mallet.util.MalletProgressMessageLogger;
import cc.mallet.util.Maths;
/**
* The trainer for a {@link RankMaxEnt} classifier. Expects Instance data to be a
 * FeatureVectorSequence, and the target to be a String representation of the
 * index of the true best FeatureVector within that sequence. Note that the Instance target
 * may be a Labels object to indicate a tie for the best position.
*
* @author Aron Culotta <a href="mailto:[email protected]">[email protected]</a>
*/
public class RankMaxEntTrainer extends MaxEntTrainer
{
private static Logger logger = MalletLogger.getLogger(RankMaxEntTrainer.class.getName());
private static Logger progressLogger = MalletProgressMessageLogger.getLogger(RankMaxEntTrainer.class.getName()+"-pl");
public RankMaxEntTrainer () {
}
/** Constructs a trainer with a Gaussian prior variance parameter to prevent overtraining.
 * 1.0 is usually a reasonable default value. */
public RankMaxEntTrainer (double gaussianPriorVariance)
{
super (gaussianPriorVariance);
}
public Optimizable.ByGradientValue getMaximizableTrainer (InstanceList ilist)
{
if (ilist == null)
return new MaximizableTrainer ();
return new MaximizableTrainer (ilist, null);
}
public MaxEnt train (InstanceList trainingSet)
{
logger.fine ("trainingSet.size() = "+trainingSet.size());
RankMaxEntTrainer.MaximizableTrainer mt =
new RankMaxEntTrainer.MaximizableTrainer (trainingSet, (RankMaxEnt)initialClassifier);
Optimizer maximizer = new LimitedMemoryBFGS(mt);
// maximizer.optimize (); // XXX given the loop below, this seems wrong.
boolean converged;
for (int i = 0; i < numIterations; i++) {
try {
converged = maximizer.optimize (1);
} catch (IllegalArgumentException e) {
e.printStackTrace();
logger.info ("Catching exception; saying converged.");
converged = true;
}
if (converged)
break;
}
if (numIterations == Integer.MAX_VALUE) {
// Run it again because in our and Sam Roweis' experience, BFGS can still
// eke out more likelihood after first convergence by re-running without
// being restricted by its gradient history.
optimizer = new ConjugateGradient(mt);
try {
optimizer.optimize ();
} catch (IllegalArgumentException e) {
e.printStackTrace();
logger.info ("Catching exception; saying converged.");
}
}
progressLogger.info("\n"); // progess messages are on one line; move on.
return mt.getClassifier ();
}
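// Usage sketch (illustrative only; "training" is an assumed InstanceList): each Instance holds a
// FeatureVectorSequence as its data and, as its target, a Label whose text is the index of the
// correct FeatureVector in that sequence (or a Labels object when several entries tie for best),
// as described in the class comment above.
//   RankMaxEnt ranker = (RankMaxEnt) new RankMaxEntTrainer(1.0).train(training);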
// xxx this won't work here.. must fix.
/**
* <p>Like the other version of <code>trainWithFeatureInduction</code>, but
* allows some default options to be changed.</p>
*
* @param maxent An initial partially-trained classifier (default <code>null</code>).
* This classifier may be modified during training.
* @param gainName The estimate of gain (log-likelihood increase) we want our chosen
* features to maximize.
* Should be one of <code>MaxEntTrainer.EXP_GAIN</code>,
* <code>MaxEntTrainer.GRADIENT_GAIN</code>, or
* <code>MaxEntTrainer.INFORMATION_GAIN</code> (default <code>EXP_GAIN</code>).
*
* @return The trained <code>MaxEnt</code> classifier
*/
/*
public Classifier trainWithFeatureInduction (InstanceList trainingData,
InstanceList validationData,
InstanceList testingData,
ClassifierEvaluating evaluator,
MaxEnt maxent,
int totalIterations,
int numIterationsBetweenFeatureInductions,
int numFeatureInductions,
int numFeaturesPerFeatureInduction,
String gainName) {
// XXX This ought to be a parameter, except that setting it to true can
// crash training ("Jump too small").
boolean saveParametersDuringFI = false;
Alphabet inputAlphabet = trainingData.getDataAlphabet();
Alphabet outputAlphabet = trainingData.getTargetAlphabet();
if (maxent == null)
maxent = new RankMaxEnt(trainingData.getPipe(),
new double[(1+inputAlphabet.size()) * outputAlphabet.size()]);
int trainingIteration = 0;
int numLabels = outputAlphabet.size();
// Initialize feature selection
FeatureSelection globalFS = trainingData.getFeatureSelection();
if (globalFS == null) {
// Mask out all features; some will be added later by FeatureInducer.induceFeaturesFor(.)
globalFS = new FeatureSelection (trainingData.getDataAlphabet());
trainingData.setFeatureSelection (globalFS);
}
if (validationData != null) validationData.setFeatureSelection (globalFS);
if (testingData != null) testingData.setFeatureSelection (globalFS);
maxent = new RankMaxEnt(maxent.getInstancePipe(), maxent.getParameters(), globalFS);
// Run feature induction
for (int featureInductionIteration = 0;
featureInductionIteration < numFeatureInductions;
featureInductionIteration++) {
// Print out some feature information
logger.info ("Feature induction iteration "+featureInductionIteration);
// Train the model a little bit. We don't care whether it converges; we
// execute all feature induction iterations no matter what.
if (featureInductionIteration != 0) {
// Don't train until we have added some features
setNumIterations(numIterationsBetweenFeatureInductions);
maxent = (RankMaxEnt)this.train (trainingData, validationData, testingData, evaluator,
maxent);
}
trainingIteration += numIterationsBetweenFeatureInductions;
logger.info ("Starting feature induction with "+(1+inputAlphabet.size())+
" features over "+numLabels+" labels.");
// Create the list of error tokens
// InstanceList errorInstances = new InstanceList (trainingData.getDataAlphabet(),
// trainingData.getTargetAlphabet());
InstanceList errorInstances = new InstanceList (inputAlphabet, outputAlphabet);
// This errorInstances.featureSelection will get examined by FeatureInducer,
// so it can know how to add "new" singleton features
errorInstances.setFeatureSelection (globalFS);
List errorLabelVectors = new ArrayList(); // these are length-1 vectors
for (int i = 0; i < trainingData.size(); i++) {
Instance inst = trainingData.get(i);
// Having trained using just the current features, see how we classify
// the training data now.
Classification classification = maxent.classify(inst);
if (!classification.bestLabelIsCorrect()) {
InstanceList il = (InstanceList) inst.getData();
Instance subInstance =
il.get(((Integer)inst.getLabeling().getBestLabel().getEntry()).intValue());
errorInstances.add(subInstance);
errorLabelVectors.add(classification.getLabelVector());
// errorLabelVectors.add(createLabelVector(subInstance, classification));
}
}
logger.info ("Error instance list size = "+errorInstances.size());
int s = errorLabelVectors.size();
LabelVector[] lvs = new LabelVector[s];
for (int i = 0; i < s; i++) {
lvs[i] = (LabelVector)errorLabelVectors.get(i);
}
RankedFeatureVector.Factory gainFactory = null;
if (gainName.equals (EXP_GAIN))
gainFactory = new ExpGain.Factory (lvs, gaussianPriorVariance);
else if (gainName.equals(GRADIENT_GAIN))
gainFactory = new GradientGain.Factory (lvs);
else if (gainName.equals(INFORMATION_GAIN))
gainFactory = new InfoGain.Factory ();
else
throw new IllegalArgumentException("Unsupported gain name: "+gainName);
FeatureInducer klfi =
new FeatureInducer (gainFactory,
errorInstances,
numFeaturesPerFeatureInduction,
2*numFeaturesPerFeatureInduction,
2*numFeaturesPerFeatureInduction);
// Note that this adds features globally, but not on a per-transition basis
klfi.induceFeaturesFor (trainingData, false, false);
if (testingData != null) klfi.induceFeaturesFor (testingData, false, false);
logger.info ("MaxEnt FeatureSelection now includes "+globalFS.cardinality()+" features");
klfi = null;
double[] newParameters = new double[(1+inputAlphabet.size()) * outputAlphabet.size()];
// XXX (Executing this block often causes an error during training; I don't know why.)
if (saveParametersDuringFI) {
// Keep current parameter values
// XXX This relies on the implementation detail that the most recent features
// added to an Alphabet get the highest indices.
// Count parameters per output label
int oldParamCount = maxent.parameters.length / outputAlphabet.size();
int newParamCount = 1+inputAlphabet.size();
// Copy params into the proper locations
for (int i=0; i<outputAlphabet.size(); i++) {
System.arraycopy(maxent.parameters, i*oldParamCount,
newParameters, i*newParamCount,
oldParamCount);
}
for (int i=0; i<oldParamCount; i++)
if (maxent.parameters[i] != newParameters[i]) {
System.out.println(maxent.parameters[i]+" "+newParameters[i]);
System.exit(0);
}
}
maxent.parameters = newParameters;
maxent.defaultFeatureIndex = inputAlphabet.size();
}
// Finished feature induction
logger.info("Ended with "+globalFS.cardinality()+" features.");
setNumIterations(totalIterations - trainingIteration);
return this.train (trainingData, validationData, testingData,
evaluator, maxent);
}
*/
public String toString()
{
return "RankMaxEntTrainer"
// + "("+maximizerClass.getName()+") "
+ ",numIterations=" + numIterations
+ ",gaussianPriorVariance="+gaussianPriorVariance;
}
// A private inner class that wraps up a RankMaxEnt
// classifier and its training data. The result is a
// maximize.Maximizable function.
private class MaximizableTrainer implements Optimizable.ByGradientValue
{
double[] parameters, constraints, cachedGradient;
RankMaxEnt theClassifier;
InstanceList trainingList;
// The expectations are (temporarily) stored in the cachedGradient
double cachedValue;
boolean cachedValueStale;
boolean cachedGradientStale;
int numLabels;
int numFeatures;
int defaultFeatureIndex; // just for clarity
FeatureSelection featureSelection;
FeatureSelection[] perLabelFeatureSelection;
public MaximizableTrainer (){}
public MaximizableTrainer (InstanceList ilist, RankMaxEnt initialClassifier)
{
this.trainingList = ilist;
Alphabet fd = ilist.getDataAlphabet();
LabelAlphabet ld = (LabelAlphabet) ilist.getTargetAlphabet();
// Don't fd.stopGrowth, because someone might want to do feature induction
//ld.stopGrowth();
// Add one feature for the "default feature".
// assume underlying Instances are binary
//this.numLabels = underlyingLabelAlphabet.size();
// xxx
this.numLabels = 2;
this.numFeatures = fd.size() + 1;
this.defaultFeatureIndex = numFeatures-1;
this.parameters = new double [numLabels * numFeatures];
this.constraints = new double [numLabels * numFeatures];
this.cachedGradient = new double [numLabels * numFeatures];
Arrays.fill (parameters, 0.0);
Arrays.fill (constraints, 0.0);
Arrays.fill (cachedGradient, 0.0);
this.featureSelection = ilist.getFeatureSelection();
this.perLabelFeatureSelection = ilist.getPerLabelFeatureSelection();
// Add the default feature index to the selection
if (featureSelection != null)
featureSelection.add (defaultFeatureIndex);
if (perLabelFeatureSelection != null)
for (int i = 0; i < perLabelFeatureSelection.length; i++)
perLabelFeatureSelection[i].add (defaultFeatureIndex);
// xxx Later change this to allow both to be set, but select which one to use by a boolean flag?
assert (featureSelection == null || perLabelFeatureSelection == null);
if (initialClassifier != null) {
this.theClassifier = initialClassifier;
this.parameters = theClassifier.parameters;
this.featureSelection = theClassifier.featureSelection;
this.perLabelFeatureSelection = theClassifier.perClassFeatureSelection;
this.defaultFeatureIndex = theClassifier.defaultFeatureIndex;
assert (initialClassifier.getInstancePipe() == ilist.getPipe());
}
else if (this.theClassifier == null) {
this.theClassifier = new RankMaxEnt (ilist.getPipe(), parameters, featureSelection, perLabelFeatureSelection);
}
cachedValueStale = true;
cachedGradientStale = true;
// Initialize the constraints, using only the constraints from
// the "positive" instance
Iterator<Instance> iter = trainingList.iterator ();
logger.fine("Number of instances in training list = " + trainingList.size());
while (iter.hasNext()) {
Instance instance = iter.next();
double instanceWeight = trainingList.getInstanceWeight(instance);
FeatureVectorSequence fvs = (FeatureVectorSequence) instance.getData();
// label of best instance in subList
Object target = instance.getTarget();
Label label = null;
if (target instanceof Labels)
label = ((Labels)target).get(0);
else label = (Label)target;
int positiveIndex =
Integer.valueOf(label.getBestLabel().getEntry().toString()).intValue();
if (positiveIndex == -1) { // invalid instance
logger.warning("True label is -1. Skipping...");
continue;
}
FeatureVector fv = (FeatureVector)fvs.get(positiveIndex);
Alphabet fdict = fv.getAlphabet();
assert (fv.getAlphabet() == fd);
// xxx ensure dimensionality of constraints correct
MatrixOps.rowPlusEquals (constraints, numFeatures, 0, fv, instanceWeight);
// For the default feature, whose weight is 1.0
assert(!Double.isNaN(instanceWeight)) : "instanceWeight is NaN";
//assert(!Double.isNaN(li)) : "bestIndex is NaN";
boolean hasNaN = false;
for(int i = 0; i < fv.numLocations(); i++) {
if(Double.isNaN(fv.valueAtLocation(i))) {
logger.info("NaN for feature " + fdict.lookupObject(fv.indexAtLocation(i)).toString());
hasNaN = true;
}
}
if(hasNaN)
logger.info("NaN in instance: " + instance.getName());
// default constraints for positive instances xxx
constraints[0*numFeatures + defaultFeatureIndex] += 1.0 * instanceWeight;
}
//TestMaximizable.testValueAndGradientCurrentParameters (this);
}
public RankMaxEnt getClassifier () { return theClassifier; }
public double getParameter (int index) {
return parameters[index];
}
public void setParameter (int index, double v) {
cachedValueStale = true;
cachedGradientStale = true;
parameters[index] = v;
}
public int getNumParameters() {
return parameters.length;
}
public void getParameters (double[] buff) {
if (buff == null || buff.length != parameters.length)
buff = new double [parameters.length];
System.arraycopy (parameters, 0, buff, 0, parameters.length);
}
public void setParameters (double [] buff) {
assert (buff != null);
cachedValueStale = true;
cachedGradientStale = true;
if (buff.length != parameters.length)
parameters = new double[buff.length];
System.arraycopy (buff, 0, parameters, 0, buff.length);
}
// log probability of the training labels, which here means the
// probability of the positive example being labeled as such
public double getValue ()
{
if (cachedValueStale) {
cachedValue = 0;
// We'll store the expectation values in "cachedGradient" for now
cachedGradientStale = true;
MatrixOps.setAll (cachedGradient, 0.0);
// Incorporate likelihood of data
double value = 0.0;
Iterator<Instance> iter = trainingList.iterator();
int ii=0;
while (iter.hasNext()) {
ii++;
Instance instance = iter.next();
FeatureVectorSequence fvs = (FeatureVectorSequence) instance.getData();
// scores stores Pr of subList[i] being positive instance
double[] scores = new double[fvs.size()];
double instanceWeight = trainingList.getInstanceWeight(instance);
// labeling is a String representation of an int, indicating which FeatureVector from
// the subList is the positive example
// If it is a String, proceed as usual. Else, if it is a String[], do
// not penalize scores for duplicate entries. This improved accuracy in some experiments.
Object target = instance.getTarget();
int li = -1;
if (target instanceof Label) {
li = Integer.valueOf(((Label)target).toString()).intValue();
if (li == -1) // hack to avoid invalid instances
continue;
assert (li >=0 && li < fvs.size());
this.theClassifier.getClassificationScores (instance, scores);
} else if (target instanceof Labels){
Labels labels = (Labels)target;
int[] bestPositions = new int[labels.size()];
for (int pi = 0; pi < labels.size(); pi++)
bestPositions[pi] = Integer.valueOf(labels.get(pi).toString());
li = bestPositions[0];
this.theClassifier.getClassificationScoresForTies (instance, scores, bestPositions);
}
value = - (instanceWeight * Math.log (scores[li]));
if(Double.isNaN(value)) {
logger.fine ("MaxEntTrainer: Instance " + instance.getName() +
"has NaN value. log(scores)= " + Math.log(scores[li]) +
" scores = " + scores[li] +
" has instance weight = " + instanceWeight);
}
if (Double.isInfinite(value)) {
logger.warning ("Instance "+instance.getSource() + " has infinite value; skipping value and gradient");
cachedValue -= value;
cachedValueStale = false;
return -value;
}
cachedValue += value;
double positiveScore = scores[li];
for (int si=0; si < fvs.size(); si++) {
if (scores[si]==0)
continue;
assert (!Double.isInfinite(scores[si]));
FeatureVector cfv = (FeatureVector)fvs.get(si);
MatrixOps.rowPlusEquals (cachedGradient, numFeatures,
0, cfv, -instanceWeight * scores[si]);
cachedGradient[numFeatures*0 + defaultFeatureIndex] += (-instanceWeight * scores[si]);
}
}
// Incorporate prior on parameters
for (int li = 0; li < numLabels; li++)
for (int fi = 0; fi < numFeatures; fi++) {
double param = parameters[li*numFeatures + fi];
cachedValue += param * param / (2 * gaussianPriorVariance);
}
cachedValue *= -1.0; // MAXIMIZE, NOT MINIMIZE
cachedValueStale = false;
progressLogger.info ("Value (loglikelihood) = "+cachedValue);
}
return cachedValue;
}
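// Sketch of the quantity computed above: each instance contributes
//   -w_i * log p(li | FeatureVectorSequence_i), where li is the index of the positive example,
// the Gaussian prior penalty sum theta^2 / (2 * gaussianPriorVariance) is then added, and the
// total is negated before returning so that optimizers maximize it.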
public void getValueGradient (double [] buffer)
{
// Gradient is (constraint - expectation - parameters/gaussianPriorVariance)
if (cachedGradientStale) {
if (cachedValueStale)
// This will fill in the cachedGradient with the "-expectation"
getValue ();
MatrixOps.plusEquals (cachedGradient, constraints);
// Incorporate prior on parameters
MatrixOps.plusEquals (cachedGradient, parameters, -1.0 / gaussianPriorVariance);
// A parameter may be set to -infinity by an external user.
// We set gradient to 0 because the parameter's value can
// never change anyway and it will mess up future calculations
// on the matrix, such as norm().
MatrixOps.substitute (cachedGradient, Double.NEGATIVE_INFINITY, 0.0);
// Set to zero all the gradient dimensions that are not among the selected features
if (perLabelFeatureSelection == null) {
for (int labelIndex = 0; labelIndex < numLabels; labelIndex++)
MatrixOps.rowSetAll (cachedGradient, numFeatures,
labelIndex, 0.0, featureSelection, false);
} else {
for (int labelIndex = 0; labelIndex < numLabels; labelIndex++)
MatrixOps.rowSetAll (cachedGradient, numFeatures,
labelIndex, 0.0,
perLabelFeatureSelection[labelIndex], false);
}
cachedGradientStale = false;
}
assert (buffer != null && buffer.length == parameters.length);
System.arraycopy (cachedGradient, 0, buffer, 0, cachedGradient.length);
}
}
// SERIALIZATION
private static final long serialVersionUID = 1;
private static final int CURRENT_SERIAL_VERSION = 1;
private void writeObject (ObjectOutputStream out) throws IOException {
out.defaultWriteObject ();
out.writeInt (CURRENT_SERIAL_VERSION);
}
private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject ();
int version = in.readInt ();
}
}
| 22,759 | 38.651568 | 119 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/BalancedWinnow.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import cc.mallet.pipe.Pipe;
import cc.mallet.types.FeatureVector;
import cc.mallet.types.Instance;
import cc.mallet.types.LabelVector;
import cc.mallet.types.MatrixOps;
/**
 * Classification methods of the BalancedWinnow algorithm.
*
* @see BalancedWinnowTrainer
* @author Gary Huang <a href="mailto:[email protected]">[email protected]</a>
*/
public class BalancedWinnow extends Classifier implements Serializable
{
double [][] m_weights;
/**
* Passes along data pipe and weights from
* {@link #BalancedWinnowTrainer BalancedWinnowTrainer}
* @param dataPipe needed for dictionary, labels, feature vectors, etc
* @param weights weights calculated during training phase
*/
public BalancedWinnow (Pipe dataPipe, double [][] weights)
{
super (dataPipe);
m_weights = new double[weights.length][weights[0].length];
for (int i = 0; i < weights.length; i++)
for (int j = 0; j < weights[0].length; j++)
m_weights[i][j] = weights[i][j];
}
/**
* @return a copy of the weight vectors
*/
public double[][] getWeights()
{
int numCols = m_weights[0].length;
double[][] ret = new double[m_weights.length][numCols];
for (int i = 0; i < ret.length; i++)
System.arraycopy(m_weights[i], 0, ret[i], 0, numCols);
return ret;
}
/**
* Classifies an instance using BalancedWinnow's weights
*
* <p>Returns a Classification containing the normalized
* dot products between class weight vectors and the instance
* feature vector.
*
* <p>One can obtain the confidence of the classification by
* calculating weight(j')/weight(j), where j' is the
* highest weight prediction and j is the 2nd-highest.
* Another possibility is to calculate
 * <br><tt><center>e^{dot(w_j', x)} / sum_j[e^{dot(w_j, x)}]</center></tt>
*/
public Classification classify (Instance instance)
{
int numClasses = getLabelAlphabet().size();
int numFeats = getAlphabet().size();
double[] scores = new double[numClasses];
FeatureVector fv = (FeatureVector) instance.getData ();
// Make sure the feature vector's feature dictionary matches
// what we are expecting from our data pipe (and thus our notion
// of feature probabilities).
assert (instancePipe == null || fv.getAlphabet () == this.instancePipe.getDataAlphabet ());
int fvisize = fv.numLocations();
// Take dot products
double sum = 0;
for (int ci = 0; ci < numClasses; ci++) {
for (int fvi = 0; fvi < fvisize; fvi++) {
int fi = fv.indexAtLocation (fvi);
double vi = fv.valueAtLocation(fvi);
if ( m_weights[ci].length > fi ) {
scores[ci] += vi * m_weights[ci][fi];
sum += vi * m_weights[ci][fi];
}
}
scores[ci] += m_weights[ci][numFeats];
sum += m_weights[ci][numFeats];
}
MatrixOps.timesEquals(scores, 1.0 / sum);
// Create and return a Classification object
return new Classification (instance, this, new LabelVector (getLabelAlphabet(), scores));
}
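// Confidence sketch (illustrative; mirrors the weight(j')/weight(j) ratio suggested in the
// classify() comment above, using accessors LabelVector inherits from RankedFeatureVector;
// "winnow" and "instance" are assumed to be a trained BalancedWinnow and an Instance from
// the same pipe):
//   Classification c = winnow.classify(instance);
//   LabelVector lv = c.getLabelVector();
//   double confidence = lv.getValueAtRank(0) / lv.getValueAtRank(1);   // weight(j') / weight(j)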
// Serialization
// serialVersionUID is overriden to prevent innocuous changes in this
// class from making the serialization mechanism think the external
// format has changed.
private static final long serialVersionUID = 1;
private static final int CURRENT_SERIAL_VERSION = 1;
private void writeObject(ObjectOutputStream out) throws IOException
{
out.writeInt(CURRENT_SERIAL_VERSION);
out.writeObject(getInstancePipe());
// write weight vector for each class
out.writeObject(m_weights);
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
int version = in.readInt();
if (version != CURRENT_SERIAL_VERSION)
throw new ClassNotFoundException("Mismatched BalancedWinnow versions: wanted " +
CURRENT_SERIAL_VERSION + ", got " +
version);
instancePipe = (Pipe) in.readObject();
m_weights = (double[][]) in.readObject();
}
}
| 4,752 | 33.693431 | 99 | java |
twitter_nlp | twitter_nlp-master/mallet-2.0.6/src/cc/mallet/classify/MaxEntGETrainer.java | /* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.classify;
import java.io.Serializable;
import java.util.HashMap;
import java.util.logging.Logger;
import cc.mallet.optimize.LimitedMemoryBFGS;
import cc.mallet.optimize.Optimizable;
import cc.mallet.optimize.Optimizer;
import cc.mallet.types.InstanceList;
import cc.mallet.util.MalletLogger;
import cc.mallet.util.MalletProgressMessageLogger;
/**
* Training of MaxEnt models with labeled features using
* Generalized Expectation Criteria.
*
* Based on:
* "Learning from Labeled Features using Generalized Expectation Criteria"
* Gregory Druck, Gideon Mann, Andrew McCallum
* SIGIR 2008
*
* @author Gregory Druck <a href="mailto:[email protected]">[email protected]</a>
*
 * Better explanations of the parameters are given in MaxEntOptimizableByGE.
*/
public class MaxEntGETrainer extends ClassifierTrainer<MaxEnt> implements ClassifierTrainer.ByOptimization<MaxEnt>, Boostable, Serializable {
private static final long serialVersionUID = 1L;
private static Logger logger = MalletLogger.getLogger(MaxEntGETrainer.class.getName());
private static Logger progressLogger = MalletProgressMessageLogger.getLogger(MaxEntGETrainer.class.getName()+"-pl");
private int numIterations = Integer.MAX_VALUE;
private double temperature = 1;
private double gaussianPriorVariance = 1;
private String constraintsFile;
private HashMap<Integer,double[]> constraints;
private InstanceList trainingList = null;
private MaxEnt classifier = null;
private MaxEntOptimizableByGE ge = null;
private Optimizer opt = null;
public MaxEntGETrainer() {}
public MaxEntGETrainer(HashMap<Integer,double[]> constraints) {
this.constraints = constraints;
}
public MaxEntGETrainer(HashMap<Integer,double[]> constraints, MaxEnt classifier) {
this.constraints = constraints;
this.classifier = classifier;
}
public void setConstraintsFile(String filename) {
this.constraintsFile = filename;
}
public void setTemperature(double temp) {
this.temperature = temp;
}
public void setGaussianPriorVariance(double variance) {
this.gaussianPriorVariance = variance;
}
public MaxEnt getClassifier () {
return classifier;
}
public Optimizable getOptimizable () {
return ge;
}
public Optimizer getOptimizer () {
return opt;
}
/**
 * Specifies the maximum number of iterations to run during a single call
 * to <code>train</code>.
*/
public void setNumIterations (int i) {
numIterations = i;
}
public int getIteration () {
if (ge == null)
return 0;
else
return Integer.MAX_VALUE;
}
public MaxEnt train (InstanceList trainingList) {
return train (trainingList, numIterations);
}
public MaxEnt train (InstanceList train, int numIterations) {
trainingList = train;
if (constraints == null && constraintsFile != null) {
constraints = FeatureConstraintUtil.readConstraintsFromFile(constraintsFile, trainingList);
logger.info("number of constraints: " + constraints.size());
}
ge = new MaxEntOptimizableByGE(trainingList,constraints,classifier);
ge.setTemperature(temperature);
ge.setGaussianPriorVariance(gaussianPriorVariance);
opt = new LimitedMemoryBFGS(ge);
logger.fine ("trainingList.size() = "+trainingList.size());
boolean converged;
for (int i = 0; i < numIterations; i++) {
try {
converged = opt.optimize (1);
} catch (Exception e) {
e.printStackTrace();
logger.info ("Catching exception; saying converged.");
converged = true;
}
if (converged)
break;
}
if (numIterations == Integer.MAX_VALUE) {
// Run it again because in our and Sam Roweis' experience, BFGS can still
// eke out more likelihood after first convergence by re-running without
// being restricted by its gradient history.
opt = new LimitedMemoryBFGS(ge);
try {
opt.optimize ();
} catch (Exception e) {
e.printStackTrace();
logger.info ("Catching exception; saying converged.");
}
}
progressLogger.info("\n"); // progress messages are on one line; move on.
classifier = ge.getClassifier();
return classifier;
}
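// Usage sketch (illustrative only; "constraints.txt" is an assumed path in the format read by
// FeatureConstraintUtil.readConstraintsFromFile, and "instances" an assumed InstanceList):
//   MaxEntGETrainer trainer = new MaxEntGETrainer();
//   trainer.setConstraintsFile("constraints.txt");
//   trainer.setTemperature(1.0);
//   trainer.setGaussianPriorVariance(1.0);
//   MaxEnt maxent = trainer.train(instances);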
}
| 4,755 | 30.084967 | 141 | java |