code | docstring | func_name | language | repo | path | url | license |
---|---|---|---|---|---|---|---|
setRepoUnavailable = async function(packageName, value) {
await setValue(packageName, propKeys.repoUnavailable, value ? "1" : "0");
}
|
Set whether the package's repository is marked as unavailable.
@param {string} packageName
@param {boolean} value
|
setRepoUnavailable
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
getRepoUnavailable = async function(packageName) {
const text = await getValue(packageName, propKeys.repoUnavailable);
return text == "1" ? true : false;
}
|
Get whether the package's repository is marked as unavailable.
@param {string} packageName
@returns {Promise<boolean>}
|
getRepoUnavailable
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
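A minimal usage sketch (assuming app/models/packageExtra.js exports both accessors and the Redis connection is already configured; the package name is a placeholder):

const packageExtra = require("./app/models/packageExtra");

const markRepoDown = async function(packageName) {
  await packageExtra.setRepoUnavailable(packageName, true);
  // The flag is stored as "1"/"0", so the getter converts it back to a boolean.
  const unavailable = await packageExtra.getRepoUnavailable(packageName);
  console.log(`${packageName} repo unavailable: ${unavailable}`);
};

markRepoDown("com.example.package");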
setUpdatedTime = async function(packageName, updatedTime) {
await setValue(packageName, propKeys.updatedTime, updatedTime);
}
|
Set the cached updated time for a package.
@param {string} packageName
@param {number} updatedTime
|
setUpdatedTime
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
getUpdatedTime = async function(packageName) {
const value = await getValue(packageName, propKeys.updatedTime);
return parseInt(value) || 0;
}
|
Get the cached updated time for a package.
@param {string} packageName
@returns {Promise<number>}
|
getUpdatedTime
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
getMonthlyDownloads = async function(packageName) {
const value = await getValue(packageName, propKeys.monthlyDownloads);
return parseInt(value) || 0;
}
|
Get monthly downloads for a package.
@param {string} packageName - The name of the package.
@returns {Promise<number>} - A Promise that resolves to the number of downloads.
|
getMonthlyDownloads
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
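A brief sketch of the round trip (assuming the same module exports both downloads accessors; values are placeholders):

const packageExtra = require("./app/models/packageExtra");

const refreshDownloads = async function(packageName, downloads) {
  await packageExtra.setMonthlyDownloads(packageName, downloads);
  // A missing or non-numeric value comes back as 0, because the getter uses parseInt(value) || 0.
  return await packageExtra.getMonthlyDownloads(packageName);
};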
setMonthlyDownloads = async function(packageName, downloads) {
await setValue(packageName, propKeys.monthlyDownloads, downloads);
}
|
Set monthly downloads for a package.
@param {string} packageName - The name of the package.
@param {number} downloads - The number of downloads to set.
@returns {Promise<void>} - A Promise that resolves when the downloads have been set.
|
setMonthlyDownloads
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
setValue = async function(packageName, propKey, propVal) {
const key = packageKey + packageName;
await redis.client.hset(key, propKey, propVal);
}
|
Set a property value in the package's Redis hash.
@param {string} packageName - The name of the package.
@param {string} propKey - The property key.
@param {*} propVal - The property value to store.
@returns {Promise<void>} - A Promise that resolves when the value has been set.
|
setValue
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
getValue = async function(packageName, propKey) {
const key = packageKey + packageName;
return await redis.client.hget(key, propKey);
}
|
Get a property value from the package's Redis hash.
@param {string} packageName - The name of the package.
@param {string} propKey - The property key.
@returns {Promise<string|null>} - A Promise that resolves to the stored value, or null if absent.
|
getValue
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
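The two hash helpers above are the storage layer behind the per-package accessors in this file: each accessor reads or writes one field of the Redis hash keyed by packageKey + packageName. A sketch of an additional accessor built the same way (the imageUrl property key is illustrative, not necessarily present in propKeys):

getImageUrl = async function(packageName) {
  // Reads a single field from the package's Redis hash via the generic helper.
  return await getValue(packageName, propKeys.imageUrl);
};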
setAggregatedExtraData = async function(obj) {
const jsonText = JSON.stringify(obj, null, 0);
await redis.client.set(allPackagesExtraKey, jsonText);
}
|
Set aggregated extra data.
@param {object} obj
|
setAggregatedExtraData
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
setRecentPackages = async function(arr) {
const jsonText = JSON.stringify(arr, null, 0);
await redis.client.set(recentPackagesKey, jsonText);
}
|
Set recent packages.
@param {Array} arr
|
setRecentPackages
|
javascript
|
openupm/openupm
|
app/models/packageExtra.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageExtra.js
|
BSD-3-Clause
|
setFeedRecentUpdate = async function(objs) {
// Sort by time.
objs = orderBy(objs, ["time"], ["desc"]);
// Generate the feed.
const feed = new Feed({
title: "OpenUPM Recent Updates",
description: "Feed of OpenUPM Recently Updated Packages",
id: "https://openupm.com/",
link: "https://openupm.com/",
language: "en",
image: "https://openupm.com/images/openupm-icon-256.png",
copyright: "Copyright @ 2019 Favo Yang",
feedLinks: {
rss: "https://openupm.com/feeds/updates/rss",
json: "https://openupm.com/feeds/updates/json",
atom: "https://openupm.com/feeds/updates/atom"
},
author: {
name: "OpenUPM",
email: "[email protected]",
link: "https://openupm.com"
}
});
const limit = Math.min(objs.length, config.feeds.recentUpdateCount);
for (let i = 0; i < limit; i++) {
const obj = objs[i];
const guid = `${obj.packageName}@${obj.version}`;
const url = `https://openupm.com/packages/${obj.packageName}`;
const title = `${obj.displayName} v${obj.version} release`;
const description = `Package ${obj.packageName} v${obj.version} is released.`;
const date = new Date(obj.time);
feed.addItem({
title,
id: guid,
link: url,
description,
content: description,
date,
author: obj.author,
image: obj.image
});
}
// Save for formats.
const rss2 = feed.rss2();
await redis.client.set(feedRecentUpdateKey + "rss2", rss2);
const atom1 = feed.atom1();
await redis.client.set(feedRecentUpdateKey + "atom1", atom1);
const json1 = feed.json1();
await redis.client.set(feedRecentUpdateKey + "json1", json1);
}
|
Set the recent-update feeds (RSS 2, Atom 1 and JSON 1).
@param {Array} objs
[{
packageName: str,
displayName: str,
time: int,
version: str,
author: [
{
name: str,
link: str
}, ...]
}, ...]
|
setFeedRecentUpdate
|
javascript
|
openupm/openupm
|
app/models/packageFeed.js
|
https://github.com/openupm/openupm/blob/master/app/models/packageFeed.js
|
BSD-3-Clause
|
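An illustrative call with a single entry matching the documented shape (the package data is a placeholder; time is an epoch timestamp in milliseconds):

// Inside an async context:
await setFeedRecentUpdate([
  {
    packageName: "com.example.foo",
    displayName: "Example Foo",
    time: 1577836800000,
    version: "1.2.0",
    author: [{ name: "Jane Doe", link: "https://example.com" }]
  }
]);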
function getGithubToken() {
if (config.github.tokens && config.github.tokens.length > 0)
// Return random token from the list.
return config.github.tokens[
Math.floor(Math.random() * config.github.tokens.length)
];
// Fall back to the single token.
if (config.github.token) return config.github.token;
}
|
Return GitHub token from the configuration.
@returns The GitHub token.
|
getGithubToken
|
javascript
|
openupm/openupm
|
app/utils/github.js
|
https://github.com/openupm/openupm/blob/master/app/utils/github.js
|
BSD-3-Clause
|
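The selection logic implies a configuration shaped roughly like this (a sketch with placeholder token values; the real values come from the app config module):

const config = {
  github: {
    // When the list is non-empty, getGithubToken returns a random entry from it.
    tokens: ["ghp_tokenA", "ghp_tokenB"],
    // Used only as a fallback when the list is empty or unset.
    token: "ghp_fallback"
  }
};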
healthCheck = async function(checkId) {
if (process.env.NODE_ENV !== "production") return;
try {
let resp = null;
const source = CancelToken.source();
setTimeout(() => {
if (resp === null) source.cancel("ECONNTIMEOUT");
}, 10000);
resp = await AxiosService.create().get(
urljoin("https://hc-ping.com/", checkId),
{ cancelToken: source.token }
);
return resp.data;
} catch (error) {
logger.error(httpErrorInfo(error, { checkId }), "healthcheck error");
}
}
|
Ping healthchecks.io
@param {string} checkId
|
healthCheck
|
javascript
|
openupm/openupm
|
app/utils/healthCheck.js
|
https://github.com/openupm/openupm/blob/master/app/utils/healthCheck.js
|
BSD-3-Clause
|
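A minimal sketch of the intended call pattern (the check id is a placeholder; outside production the function returns immediately):

const runJob = async function() {
  // ... do the scheduled work ...
  await healthCheck("00000000-0000-0000-0000-000000000000");
};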
httpErrorInfo = function(err, others) {
// Show http status if possible or fallback to error
if (err.response && err.response.status)
return { status: err.response.status, ...others };
else return { err, ...others };
}
|
Return HTTP error info object
@param {Object} error
@param {Object} others
|
httpErrorInfo
|
javascript
|
openupm/openupm
|
app/utils/http.js
|
https://github.com/openupm/openupm/blob/master/app/utils/http.js
|
BSD-3-Clause
|
isErrorCode = function(error, code) {
return error.response && error.response.status == code;
}
|
Return if error has given status code.
@param {Object} error
@param {Number} code
|
isErrorCode
|
javascript
|
openupm/openupm
|
app/utils/http.js
|
https://github.com/openupm/openupm/blob/master/app/utils/http.js
|
BSD-3-Clause
|
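A sketch of how the two helpers combine in a request handler (logger and fetchPackageInfo are placeholders):

const fetchSafely = async function(packageName) {
  try {
    return await fetchPackageInfo(packageName);
  } catch (error) {
    if (isErrorCode(error, 404))
      logger.warn(httpErrorInfo(error, { packageName }), "package not found");
    else
      logger.error(httpErrorInfo(error, { packageName }), "request failed");
    return null;
  }
};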
addImage = async function({
imageUrl,
width,
height,
fit,
duration,
filename,
force
}) {
const key = getMediaKey({ imageUrl, width, height, fit });
const expire = new Date().getTime() + duration;
const oldImageEntry = await getImage({ imageUrl, width, height, fit });
// download image to a tmp file
const tmpFilename = getMediaTempFilename({ imageUrl, width, height, fit });
const tmpFilePath = path.join(mediaDir, tmpFilename);
await _downloadImageUrl(imageUrl, tmpFilePath);
try {
// check the image size
const newSize = fs.statSync(tmpFilePath).size;
if (oldImageEntry && !force) {
const oldSize = oldImageEntry.size;
if (oldSize == newSize) {
// update the expire time
await redis.client.hset(key, "expire", expire);
logger.info(
{ imageUrl, width, height, fit },
"_cacheImage size remains the same, only update the expire time"
);
return;
}
}
// process the image
if (!filename)
filename = getMediaFilename({
imageUrl,
width,
height,
fit,
size: newSize
});
const filePath = path.join(mediaDir, filename);
await _processImage({
sourcePath: tmpFilePath,
destLocalPath: filePath,
destS3Path: getMediaS3Path(filename),
width,
height,
fit
});
// update redis
await redis.client.hmset(key, {
size: newSize,
expire,
filename
});
} finally {
// remove the tmp file
fs.unlinkSync(tmpFilePath);
}
}
|
Download an image, process it and upload it to S3.
@param {object} param0
|
addImage
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
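An illustrative call (URL, dimensions and fit are placeholders; duration is the cache lifetime in milliseconds, since it is added directly to the current time to compute the expire field):

// Inside an async context:
await addImage({
  imageUrl: "https://example.com/icon.png",
  width: 600,
  height: 300,
  fit: "contain",
  duration: 24 * 60 * 60 * 1000,
  force: false
});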
_downloadImageUrl = async function(imageUrl, destPath) {
let resp = null;
const source = CancelToken.source();
setTimeout(() => {
if (resp === null) source.cancel("ECONNTIMEOUT");
}, 10000);
const headers = {};
resp = await AxiosService.create().get(imageUrl, {
headers,
cancelToken: source.token,
responseType: "stream"
});
const readStream = resp.data;
const writeStream = fs.createWriteStream(destPath);
readStream.pipe(writeStream);
const streamEnd = new Promise(function(resolve, reject) {
writeStream.on("close", () => resolve(null));
readStream.on("error", reject);
});
await streamEnd;
logger.info({ imageUrl, destPath }, "image downloaded");
}
|
Download the image url to the dest path
@param {string} imageUrl
@param {string} destPath
|
_downloadImageUrl
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
_processImage = async function({
sourcePath,
destLocalPath,
destS3Path,
width,
height,
fit
}) {
const image = sharp(sourcePath);
await image
.resize(width, height, {
fit,
background: { r: 255, g: 255, b: 255, alpha: 0 }
})
.png()
.toFile(destLocalPath);
// copy to s3
await s3.uploadFile({
bucket: config.s3.mediaBucket,
localPath: destLocalPath,
remotePath: destS3Path,
acl: "public-read",
contentType: "image/png"
});
logger.info({ sourcePath, destLocalPath, destS3Path }, "image processed");
}
|
Process the image and upload to s3
@param {object} param0
|
_processImage
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
getMediaS3Path = function(filename) {
return `media/${filename}`;
}
|
Get media S3 path
@param {string} filename
|
getMediaS3Path
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
getMediaTempFilename = function({ imageUrl, width, height, fit }) {
const md5 = crypto
.createHash("md5")
.update(imageUrl)
.digest("hex");
const now = new Date().getTime();
return `${md5}-${width}x${height}-${fit}-${now}.tmp`;
}
|
Get media tmp filename
@param {object} param0
|
getMediaTempFilename
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
getImage = async function({ imageUrl, width, height, fit }) {
const key = getMediaKey({ imageUrl, width, height, fit });
const obj = await redis.client.hgetall(key);
if (isEmpty(obj))
return null;
obj.size = parseInt(obj.size) || 0;
if (!obj.filename)
obj.filename = getMediaFilename({
imageUrl,
width,
height,
fit,
size: obj.size
});
obj.filePath = path.join(mediaDir, obj.filename);
obj.s3Path = getMediaS3Path(obj.filename);
obj.expire = parseInt(obj.expire) || 0;
const now = new Date().getTime();
obj.available = now <= obj.expire;
return obj;
}
|
Get the image entry { available, filename, filePath, s3Path, expire, size }
@param {object} param0
|
getImage
|
javascript
|
openupm/openupm
|
app/utils/media.js
|
https://github.com/openupm/openupm/blob/master/app/utils/media.js
|
BSD-3-Clause
|
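A sketch of the typical read-through pattern built on getImage and addImage (the one-day duration is a placeholder):

const ensureImage = async function(params) {
  let entry = await getImage(params);
  if (!entry || !entry.available) {
    // Missing or expired: download and process the image again, then re-read the entry.
    await addImage({ ...params, duration: 24 * 60 * 60 * 1000 });
    entry = await getImage(params);
  }
  return entry;
};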
readJsFile = function(file, onComplete, ...callbackArgs) {
fs.readFile(path.relative(process.cwd(), file), 'utf8', function(err, code) {
if (err) {
return console.error(err);
}
onComplete(file, code, ...callbackArgs);
});
}
|
@todo Add features from the Non-CLI side of this module such as:
2. Custom abstraction level
3. Presentation mode
4. Defined colour schemes (default, B&W, blurred, light)
5. Custom colour scheme
6. Custom style
7. Flow tree modifications (iterative methods treated as loops, ...)
8. Custom modifier
9. Debugging
@todo Continue 3.
|
readJsFile
|
javascript
|
Bogdan-Lyashenko/js-code-to-svg-flowchart
|
cli/index.cli.js
|
https://github.com/Bogdan-Lyashenko/js-code-to-svg-flowchart/blob/master/cli/index.cli.js
|
MIT
|
writeToFile = function(filePath, data) {
fs.writeFile(filePath, data, function(err) {
if (err) {
return console.error(err);
}
console.log(`SVG file was created: ${filePath}`);
});
}
|
@description Write data to the specified file path.
@param {string} filePath Path of the destination file
@param {*} data Data to write to the destination
|
writeToFile
|
javascript
|
Bogdan-Lyashenko/js-code-to-svg-flowchart
|
cli/index.cli.js
|
https://github.com/Bogdan-Lyashenko/js-code-to-svg-flowchart/blob/master/cli/index.cli.js
|
MIT
|
createSvgFile = function(file, code) {
const svg = js2flowchart.convertCodeToSvg(code),
filePath = `${file}.svg`;
writeToFile(filePath, svg);
}
|
@description Convert JS code into an SVG flowchart and write it to `<file>.svg`.
@param {string} file Name of the JS script
@param {string} code JS code of the JS script
|
createSvgFile
|
javascript
|
Bogdan-Lyashenko/js-code-to-svg-flowchart
|
cli/index.cli.js
|
https://github.com/Bogdan-Lyashenko/js-code-to-svg-flowchart/blob/master/cli/index.cli.js
|
MIT
|
createAbstractedSvgFile = function(file, code, abstractionLevel) {
const errMsg =
'Please use (case insensitive, without the quotes): "function", "function_dependencies", "class", "import" or "export"';
if (!abstractionLevel) return console.error(`No abstraction level specified`);
const flowTreeBuilder = js2flowchart.createFlowTreeBuilder();
//Check if the abstraction level(s) are valid and process them
let abstractions = abstractionLevel.map(al => {
try {
return js2flowchart.ABSTRACTION_LEVELS[al.toUpperCase()];
} catch (err) {
throw new Error(`The following abstraction level isn't valid: ${al}\n${errMsg}`);
}
});
flowTreeBuilder.setAbstractionLevel(abstractions);
const flowTree = flowTreeBuilder.build(code);
const svg = js2flowchart.convertFlowTreeToSvg(flowTree),
filePath = `${file}.svg`;
writeToFile(filePath, svg);
}
|
@description Create an SVG file with the provided abstraction level
@param {string} file Name of the JS script
@param {string} code JS code of the JS script
@param {...string} abstractionLevel Abstraction levels (function, function dependencies, class, import, export)
@return undefined
|
createAbstractedSvgFile
|
javascript
|
Bogdan-Lyashenko/js-code-to-svg-flowchart
|
cli/index.cli.js
|
https://github.com/Bogdan-Lyashenko/js-code-to-svg-flowchart/blob/master/cli/index.cli.js
|
MIT
|
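A sketch of wiring the helpers together the way the CLI does (the input file is a placeholder; abstraction level names are case-insensitive):

// readJsFile passes (file, code, ...callbackArgs) to its callback, so the extra
// argument becomes the abstractionLevel array of createAbstractedSvgFile.
readJsFile('src/app.js', createAbstractedSvgFile, ['function', 'class']);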
filterDNAOptions = (_dna) => {
const dnaItems = _dna.split(DNA_DELIMITER);
const filteredDNA = dnaItems.filter((element) => {
const query = /(\?.*$)/;
const querystring = query.exec(element);
if (!querystring) {
return true;
}
const options = querystring[1].split("&").reduce((r, setting) => {
const keyPairs = setting.split("=");
return { ...r, [keyPairs[0]]: keyPairs[1] };
}, []);
return options.bypassDNA;
});
return filteredDNA.join(DNA_DELIMITER);
}
|
In some cases a DNA string may contain optional query parameters for options
such as bypassing the DNA isUnique check, this function filters out those
items without modifying the stored DNA.
@param {String} _dna New DNA string
@returns new DNA string with any items that should be filtered, removed.
|
filterDNAOptions
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
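A worked example (assuming DNA_DELIMITER is "-"): because the parsed query key keeps its leading "?", options.bypassDNA is undefined, so every element carrying a query string is dropped from the comparison string while plain elements are kept unchanged.

filterDNAOptions("1:Head.png-2:Eyes.png?bypassDNA=true");
// => "1:Head.png"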
removeQueryStrings = (_dna) => {
const query = /(\?.*$)/;
return _dna.replace(query, "");
}
|
Cleaning function for DNA strings. When DNA strings include an option, it
is added to the filename with a ?setting=value query string. It needs to be
removed to properly access the file name before Drawing.
@param {String} _dna The entire newDNA string
@returns Cleaned DNA string without querystring parameters.
|
removeQueryStrings
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
isDnaUnique = (_DnaList = new Set(), _dna = "") => {
const _filteredDNA = filterDNAOptions(_dna);
return !_DnaList.has(_filteredDNA);
}
|
Check whether a DNA string has not been generated before. Optional query
parameters are filtered out before comparing against the existing set.
@param {Set} _DnaList Set of previously generated (filtered) DNA strings
@param {String} _dna The new DNA string
@returns {Boolean} True when the DNA is unique.
|
isDnaUnique
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
createDna = (_layers) => {
let randNum = [];
_layers.forEach((layer) => {
var totalWeight = 0;
layer.elements.forEach((element) => {
totalWeight += element.weight;
});
// number between 0 - totalWeight
let random = Math.floor(Math.random() * totalWeight);
for (var i = 0; i < layer.elements.length; i++) {
// subtract the current weight from the random weight until we reach a sub zero value.
random -= layer.elements[i].weight;
if (random < 0) {
return randNum.push(
`${layer.elements[i].id}:${layer.elements[i].filename}${
layer.bypassDNA ? "?bypassDNA=true" : ""
}`
);
}
}
});
return randNum.join(DNA_DELIMITER);
}
|
Create a new DNA string by picking one weighted-random element from each layer.
Elements of layers flagged with bypassDNA get a ?bypassDNA=true query string appended.
@param {Array} _layers Layer setup produced by layersSetup
@returns {String} The new DNA string.
|
createDna
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
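A sketch of the retry pattern that startCreating builds around these helpers (layers is the output of layersSetup; in the real loop the number of retries is bounded by uniqueDnaTorrance):

const dnaList = new Set();
let newDna = createDna(layers);
while (!isDnaUnique(dnaList, newDna)) {
  // Duplicate DNA: roll again.
  newDna = createDna(layers);
}
dnaList.add(filterDNAOptions(newDna));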
writeMetaData = (_data) => {
fs.writeFileSync(`${buildDir}/json/_metadata.json`, _data);
}
|
Write the aggregated metadata JSON to the build directory.
@param {String} _data JSON string to write to _metadata.json.
|
writeMetaData
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
saveMetaDataSingleFile = (_editionCount) => {
let metadata = metadataList.find((meta) => meta.edition == _editionCount);
debugLogs
? console.log(
`Writing metadata for ${_editionCount}: ${JSON.stringify(metadata)}`
)
: null;
fs.writeFileSync(
`${buildDir}/json/${_editionCount}.json`,
JSON.stringify(metadata, null, 2)
);
}
|
Write the metadata of a single edition to its own JSON file in the build directory.
@param {Number} _editionCount Edition number whose metadata should be saved.
|
saveMetaDataSingleFile
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
function shuffle(array) {
let currentIndex = array.length,
randomIndex;
while (currentIndex != 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex--;
[array[currentIndex], array[randomIndex]] = [
array[randomIndex],
array[currentIndex],
];
}
return array;
}
|
Shuffle an array in place using the Fisher-Yates algorithm.
@param {Array} array The array to shuffle
@returns {Array} The shuffled array.
|
shuffle
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
startCreating = async () => {
let layerConfigIndex = 0;
let editionCount = 1;
let failedCount = 0;
let abstractedIndexes = [];
for (
let i = network == NETWORK.sol ? 0 : 1;
i <= layerConfigurations[layerConfigurations.length - 1].growEditionSizeTo;
i++
) {
abstractedIndexes.push(i);
}
if (shuffleLayerConfigurations) {
abstractedIndexes = shuffle(abstractedIndexes);
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
while (layerConfigIndex < layerConfigurations.length) {
const layers = layersSetup(
layerConfigurations[layerConfigIndex].layersOrder
);
while (
editionCount <= layerConfigurations[layerConfigIndex].growEditionSizeTo
) {
let newDna = createDna(layers);
if (isDnaUnique(dnaList, newDna)) {
let results = constructLayerToDna(newDna, layers);
let loadedElements = [];
results.forEach((layer) => {
loadedElements.push(loadLayerImg(layer));
});
await Promise.all(loadedElements).then((renderObjectArray) => {
debugLogs ? console.log("Clearing canvas") : null;
ctx.clearRect(0, 0, format.width, format.height);
if (gif.export) {
hashlipsGiffer = new HashlipsGiffer(
canvas,
ctx,
`${buildDir}/gifs/${abstractedIndexes[0]}.gif`,
gif.repeat,
gif.quality,
gif.delay
);
hashlipsGiffer.start();
}
if (background.generate) {
drawBackground();
}
renderObjectArray.forEach((renderObject, index) => {
drawElement(
renderObject,
index,
layerConfigurations[layerConfigIndex].layersOrder.length
);
if (gif.export) {
hashlipsGiffer.add();
}
});
if (gif.export) {
hashlipsGiffer.stop();
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
saveImage(abstractedIndexes[0]);
addMetadata(newDna, abstractedIndexes[0]);
saveMetaDataSingleFile(abstractedIndexes[0]);
console.log(
`Created edition: ${abstractedIndexes[0]}, with DNA: ${sha1(
newDna
)}`
);
});
dnaList.add(filterDNAOptions(newDna));
editionCount++;
abstractedIndexes.shift();
} else {
console.log("DNA exists!");
failedCount++;
if (failedCount >= uniqueDnaTorrance) {
console.log(
`You need more layers or elements to grow your edition to ${layerConfigurations[layerConfigIndex].growEditionSizeTo} artworks!`
);
process.exit();
}
}
}
layerConfigIndex++;
}
writeMetaData(JSON.stringify(metadataList, null, 2));
}
|
Main generation loop: for each layer configuration, roll unique DNA per edition,
render the layers to the canvas (optionally as a GIF) and save the image and metadata.
Exits when too many duplicate DNA strings are produced in a row.
|
startCreating
|
javascript
|
HashLips/hashlips_art_engine
|
src/main.js
|
https://github.com/HashLips/hashlips_art_engine/blob/master/src/main.js
|
MIT
|
function downloadFromOverpass (
queryName,
overpassConfig,
filename,
overpassDownloadCallback
) {
let query = '[out:json][timeout:60];('
if (overpassConfig.way) {
query += 'way'
} else {
query += 'relation'
}
const queryKeys = Object.keys(overpassConfig)
for (let i = queryKeys.length - 1; i >= 0; i--) {
const k = queryKeys[i]
if (k === 'way') continue
const v = overpassConfig[k]
query += '["' + k + '"="' + v + '"]'
}
query += ';);out body;>;out meta qt;'
// query-overpass sometimes makes duplicate callbacks, so keep track of the callbacks and
// only do a next action once.
let curOverpassQueryAttempt = 0
const overpassAttempts = {}
asynclib.auto({
fetchFromOverpassIfNeeded: function (cb) {
console.log('downloading from overpass')
fetchIfNeeded(filename, overpassDownloadCallback, cb, function () {
const overpassResponseHandler = function (err, data, overpassAttempt) {
if (overpassAttempts[overpassAttempt]) {
// Skip duplicate callback
return
}
overpassAttempts[overpassAttempt] = true
if (err) {
console.log(err)
console.log('Increasing overpass request gap')
curRequestGap *= 2
makeQuery()
} else {
console.log('Success, decreasing overpass request gap')
curRequestGap = Math.max(minRequestGap, curRequestGap / 2)
cb(null, data)
}
}
const makeQuery = function () {
console.log('waiting ' + curRequestGap + ' seconds')
setTimeout(function () {
curOverpassQueryAttempt++
overpass(
query,
(err, data) => overpassResponseHandler(err, data, curOverpassQueryAttempt),
{ flatProperties: true }
)
}, curRequestGap * 1000)
}
makeQuery()
})
},
validateOverpassResult: ['fetchFromOverpassIfNeeded', function (results, cb) {
const data = results.fetchFromOverpassIfNeeded
if (!data.features) {
const err = new Error(`Invalid geojson from overpass for query: ${queryName}`)
return cb(err)
}
if (data.features.length === 0) {
console.error('No data for the following query:')
console.error(query)
console.error('To read more about this error, please visit https://git.io/vxKQL')
return cb(new Error('No data found for from overpass query'))
}
cb()
}],
saveSingleMultiPolygon: ['validateOverpassResult', function (results, cb) {
const data = results.fetchFromOverpassIfNeeded
let combined
// union all multi-polygons / polygons into one
for (let i = data.features.length - 1; i >= 0; i--) {
const curOsmGeom = data.features[i].geometry
const curOsmProps = data.features[i].properties
if (
(curOsmGeom.type === 'Polygon' || curOsmGeom.type === 'MultiPolygon') &&
curOsmProps.type === 'boundary' // need to make sure enclaves aren't unioned
) {
console.log('combining border')
let errors = geojsonhint.hint(curOsmGeom)
if (errors && errors.length > 0) {
const stringifiedGeojson = JSON.stringify(curOsmGeom, null, 2)
errors = geojsonhint.hint(stringifiedGeojson)
console.error('Invalid geojson received in Overpass Result')
console.error('Overpass query: ' + query)
const problemFilename = `${queryName}_convert_to_geom_error.json`
fs.writeFileSync(problemFilename, stringifiedGeojson)
console.error('saved problem file to ' + problemFilename)
console.error('To read more about this error, please visit https://git.io/vxKQq')
return cb(errors)
}
let curGeom
try {
curGeom = geoJsonToGeom(curOsmGeom)
} catch (e) {
console.error('error converting overpass result to geojson')
console.error(e)
fs.writeFileSync(
`${queryName}_convert_to_geom_error-all-features.json`,
JSON.stringify(data)
)
return cb(e)
}
if (!combined) {
combined = curGeom
} else {
combined = debugGeo('union', curGeom, combined)
}
}
}
try {
fs.writeFile(filename, geomToGeoJsonString(combined), cb)
} catch (e) {
console.error('error writing combined border to geojson')
fs.writeFileSync(
queryName + '_combined_border_convert_to_geom_error.json',
JSON.stringify(data)
)
return cb(e)
}
}]
}, overpassDownloadCallback)
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
downloadFromOverpass
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
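An illustrative call (query name, tags and filename are placeholders): every key in overpassConfig other than `way` becomes a ["key"="value"] tag filter, and `way: true` switches the query from relations to ways.

downloadFromOverpass(
  'example-admin-boundary',
  { boundary: 'administrative', admin_level: '2' },
  'downloads/example-admin-boundary.json',
  function (err) {
    if (err) return console.error(err)
    console.log('boundary saved')
  }
)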
overpassResponseHandler = function (err, data, overpassAttempt) {
if (overpassAttempts[overpassAttempt]) {
// Skip duplicate callback
return
}
overpassAttempts[overpassAttempt] = true
if (err) {
console.log(err)
console.log('Increasing overpass request gap')
curRequestGap *= 2
makeQuery()
} else {
console.log('Success, decreasing overpass request gap')
curRequestGap = Math.max(minRequestGap, curRequestGap / 2)
cb(null, data)
}
}
|
Handle an Overpass response. On error, double the request gap and retry the query;
on success, shrink the request gap and pass the data to the callback.
@param {Error} err Error returned by overpass, if any
@param {object} data GeoJSON data returned by overpass
@param {number} overpassAttempt Attempt number, used to skip duplicate callbacks
|
overpassResponseHandler
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
makeQuery = function () {
console.log('waiting ' + curRequestGap + ' seconds')
setTimeout(function () {
curOverpassQueryAttempt++
overpass(
query,
(err, data) => overpassResponseHandler(err, data, curOverpassQueryAttempt),
{ flatProperties: true }
)
}, curRequestGap * 1000)
}
|
Issue the Overpass query after waiting for the current request gap.
|
makeQuery
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function downloadOsmBoundary (boundaryId, boundaryCallback) {
const boundaryFilename = downloadsDir + '/' + boundaryId + '.json'
downloadProgress.beginTask(`getting data for ${boundaryId}`, true)
downloadFromOverpass(
boundaryId,
osmBoundarySources[boundaryId],
boundaryFilename,
boundaryCallback
)
}
|
Download a boundary from Overpass using its configured source and save it to the downloads directory.
@param {string} boundaryId Id of the boundary in osmBoundarySources
@param {function} boundaryCallback The callback to call when done
|
downloadOsmBoundary
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function downloadOsmTimezoneBoundary (tzId, boundaryCallback) {
const tzBoundayName = `${tzId.replace(/\//g, '-')}-tz`
const boundaryFilename = path.join(downloadsDir, `${tzBoundayName}.json`)
const workingBoundaryFilename = path.join(osmDownloadDir, `${tzBoundayName}.json`)
downloadOSMZoneProgress.beginTask(`getting data for ${tzBoundayName}`, true)
// the downloads directory is cleared of all timezone boundaries not downloaded from OSM (there
// are still a few in here with manual definitions). Therefore, keep a copy of all OSM downloads
// so they aren't redownloaded during multiple reruns of the script
function copyToOsmDownloadFolder (err) {
if (err) return boundaryCallback(err)
fs.copyFile(boundaryFilename, workingBoundaryFilename, boundaryCallback)
}
// Before downloading from Overpass, check if there's a copy in the working folder. Since osm
// downloads are always after production zones, it is safe to copy an osm boundary because a
// production one would've already been downloaded in the event it were deleted in order to force
// the retrieval of a new zone.
fs.stat(
boundaryFilename,
(err, stats) => {
if (!err) {
// file found, initiate eventual callback
return copyToOsmDownloadFolder()
}
// check for file in working dir
fs.stat(
workingBoundaryFilename,
(err, stats) => {
if (!err) {
// file exists, copy over
return fs.copyFile(workingBoundaryFilename, boundaryFilename, boundaryCallback)
}
// file doesn't exist, download from overpass
downloadFromOverpass(
tzBoundayName,
{ timezone: tzId },
boundaryFilename,
err => {
if (err) {
// assume no data or unparseable data, write a null island
fs.writeFile(
boundaryFilename,
JSON.stringify(
{
type: 'Polygon',
coordinates: [
[[-0.1, -0.1], [0.1, -0.1], [0.1, 0.1], [-0.1, 0.1], [-0.1, -0.1]]
]
}
),
copyToOsmDownloadFolder
)
} else {
copyToOsmDownloadFolder()
}
}
)
}
)
}
)
}
|
Download a timezone boundary from Overpass, reusing a cached copy from the working
folder when available and falling back to a small null-island polygon when Overpass
returns no usable data.
@param {string} tzId Timezone identifier
@param {function} boundaryCallback The callback to call when done
|
downloadOsmTimezoneBoundary
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function copyToOsmDownloadFolder (err) {
if (err) return boundaryCallback(err)
fs.copyFile(boundaryFilename, workingBoundaryFilename, boundaryCallback)
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
copyToOsmDownloadFolder
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function safeTzFilename (tzid) {
return tzid.replace(/\//g, '__')
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
safeTzFilename
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
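A quick illustration of the sanitization above (the input is a standard IANA timezone identifier):

// every '/' in the tzid becomes '__' so the result is a safe flat filename
console.log(safeTzFilename('America/Argentina/Buenos_Aires'))
// -> 'America__Argentina__Buenos_Aires'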
function getFinalTzOutputFilename (tzid) {
return path.join(workingDir, `${safeTzFilename(tzid)}.json`)
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
getFinalTzOutputFilename
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function getFinal1970TzOutputFilename (tzid) {
return path.join(workingDir, `${safeTzFilename(tzid)}-1970.json`)
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
getFinal1970TzOutputFilename
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function getFinalNowTzOutputFilename (tzid) {
return path.join(workingDir, `${safeTzFilename(tzid)}-now.json`)
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
getFinalNowTzOutputFilename
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function getSourceDownloadName (id) {
return downloadsDir + '/' + id + '.json'
}
|
Download something from overpass and convert it into GeoJSON.
@param {string} queryName Name of the query (for debugging purposes)
@param {object} overpassConfig Config used to build overpass query
@param {string} filename Filename to save result to
@param {function} overpassDownloadCallback The callback to call when done
|
getSourceDownloadName
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function getDataSource (source) {
let geoJson
if (source.source === 'overpass') {
geoJson = require(getSourceDownloadName(source.id))
} else if (source.source === 'manual-polygon') {
geoJson = polygon(source.data).geometry
} else if (source.source === 'manual-multipolygon') {
geoJson = multiPolygon(source.data).geometry
} else if (source.source === 'final') {
geoJson = require(getFinalTzOutputFilename(source.id))
} else if (source.source === 'final1970') {
geoJson = require(getFinal1970TzOutputFilename(source.id))
} else if (source.source === 'finalNow') {
geoJson = require(getFinalNowTzOutputFilename(source.id))
} else {
const err = new Error('unknown source: ' + source.source)
throw err
}
return geoJsonToGeom(geoJson)
}
|
Get the geometry of the requested source data
@return {Object} geom The geometry of the source
@param {Object} source An object representing the data source
must have `source` key and then either:
- `id` if from a file
- `data` if defined inline (manual-polygon / manual-multipolygon sources)
|
getDataSource
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
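The source descriptors consumed by getDataSource above take roughly the following shapes; the ids and coordinates below are illustrative, not real entries from timezones.json:

// geometry previously downloaded from Overpass into the downloads directory
const fromOverpass = { source: 'overpass', id: 'SomeCountry' }
// geometry defined inline as raw coordinates
const fromManual = {
  source: 'manual-polygon',
  data: [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]
}
// geometry of an already-built timezone
const fromFinal = { source: 'final', id: 'Europe/Berlin' }
// each descriptor resolves to a jsts geometry
const geom = getDataSource(fromFinal)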
function postProcessZone (geom, returnAsObject) {
// reduce precision of geometry
const geojson = geomToGeoJson(precisionReducer.reduce(geom))
// iterate through all polygons
const filteredPolygons = []
let allPolygons = geojson.coordinates
if (geojson.type === 'Polygon') {
allPolygons = [geojson.coordinates]
}
allPolygons.forEach((curPolygon, idx) => {
// remove any polygon with very small area
const polygonFeature = polygon(curPolygon)
const polygonArea = area.geometry(polygonFeature.geometry)
if (polygonArea < 1) return
// find all holes
const filteredLinearRings = []
curPolygon.forEach((curLinearRing, lrIdx) => {
if (lrIdx === 0) {
// always keep first linearRing
filteredLinearRings.push(curLinearRing)
} else {
const polygonFromLinearRing = polygon([curLinearRing])
const linearRingArea = area.geometry(polygonFromLinearRing.geometry)
// only include holes with relevant area
if (linearRingArea > 1) {
filteredLinearRings.push(curLinearRing)
}
}
})
filteredPolygons.push(filteredLinearRings)
})
// recompile to geojson string
const newGeojson = {
type: geojson.type
}
if (geojson.type === 'Polygon') {
newGeojson.coordinates = filteredPolygons[0]
} else {
newGeojson.coordinates = filteredPolygons
}
return returnAsObject ? newGeojson : JSON.stringify(newGeojson)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
postProcessZone
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
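A minimal sketch of how postProcessZone is typically consumed, based on the callers shown elsewhere in this file; the variable names here are illustrative:

// stringified output, ready to be cached or written to disk
const geojsonString = postProcessZone(zoneGeom)
// object output, for embedding in a GeoJSON Feature
const feature = {
  type: 'Feature',
  properties: { tzid: 'Europe/Berlin' },
  geometry: postProcessZone(zoneGeom, true)
}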
function makeTimezoneBoundaries (callback) {
// load cache if available
const tzBoundaryCache = new FileLookupCache({
filename: path.join(cacheDir, 'boundary-creation-cache.json')
})
tzBoundaryCache.init(() => {
asynclib.each(
Object.keys(zoneCfg),
(tzid, cb) => {
buildingProgress.beginTask(`makeTimezoneBoundary for ${tzid}`, true)
const tzFilename = getFinalTzOutputFilename(tzid)
const ops = zoneCfg[tzid]
let geom
asynclib.map(
ops,
(op, opCb) => {
const newOp = cloneDeep(op)
if (op.source === 'overpass') {
hasha.fromFile(getSourceDownloadName(op.id))
.then(val => {
newOp.source = val
opCb(null, newOp)
})
.catch(opCb)
} else {
opCb(null, newOp)
}
},
(err, hashableOps) => {
if (err) return cb(err)
tzBoundaryCache.calculate({
cacheKey: hashMd5(hashableOps),
outputFilename: tzFilename,
calculateFn: calculateCb => {
console.log(`makeTimezoneBoundary for ${tzid}`)
asynclib.eachSeries(
ops,
(task, taskCb) => {
const taskData = getDataSource(task)
console.log('-', task.op, task.id)
if (task.op === 'init') {
geom = taskData
} else if (task.op === 'intersect') {
geom = debugGeo('intersection', geom, taskData)
} else if (task.op === 'difference') {
geom = debugGeo('diff', geom, taskData)
} else if (task.op === 'difference-reverse-order') {
geom = debugGeo('diff', taskData, geom)
} else if (task.op === 'union') {
geom = debugGeo('union', geom, taskData)
} else {
const err = new Error('unknown op: ' + task.op)
return taskCb(err)
}
taskCb()
},
opsErr => {
                    if (opsErr) return calculateCb(opsErr)
calculateCb(null, postProcessZone(geom))
}
)
},
callback: cb
})
}
)
},
err => {
if (err) return callback(err)
tzBoundaryCache.end(callback)
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
makeTimezoneBoundaries
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
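For reference, each zoneCfg entry processed above is an ordered list of operations; the entry below is a hypothetical example of the shape (real definitions live in timezones.json):

const ops = [
  // start from a downloaded Overpass boundary
  { op: 'init', source: 'overpass', id: 'SomeCountry' },
  // then carve out or merge additional geometry
  { op: 'difference', source: 'manual-polygon', data: [[[0, 0], [1, 0], [1, 1], [0, 0]]] },
  { op: 'union', source: 'overpass', id: 'SomeIsland' }
]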
function makeDerivedTimezoneBoundaries (strategy, callback) {
const cfg = (
strategy === '1970'
? {
cacheFilename: path.join(cacheDir, 'derived-1970-cache.json'),
derivedZoneConfig: zoneCfg1970,
getFinalTzFilenameFn: getFinal1970TzOutputFilename,
loadZonesInMemoryFn: loadFinal1970ZonesIntoMemory,
progressStatsName: 'Building 1970 zones',
progressStatsUpdatePrefix: 'make1970TimezoneBoundary for'
}
: {
cacheFilename: path.join(cacheDir, 'derived-now-cache.json'),
derivedZoneConfig: zoneCfgNow,
getFinalTzFilenameFn: getFinalNowTzOutputFilename,
loadZonesInMemoryFn: loadFinalNowZonesIntoMemory,
progressStatsName: 'Building Now zones',
progressStatsUpdatePrefix: 'makeNowTimezoneBoundary for'
}
)
const buildingProgress = new ProgressStats(
cfg.progressStatsName,
Object.keys(cfg.derivedZoneConfig).length
)
// load cache if available
const tzBoundaryCache = new FileLookupCache({
filename: cfg.cacheFilename
})
tzBoundaryCache.init(() => {
asynclib.each(
Object.keys(cfg.derivedZoneConfig),
(tzid, cb) => {
const message = `${cfg.progressStatsUpdatePrefix} ${tzid}`
buildingProgress.beginTask(message, true)
tzBoundaryCache.calculate({
cacheKey: hashMd5(cfg.derivedZoneConfig[tzid].map(getZoneGeomHash)),
outputFilename: cfg.getFinalTzFilenameFn(tzid),
calculateFn: calculateCb => {
console.log(message)
let geom = getDataSource({ source: 'final', id: tzid })
cfg.derivedZoneConfig[tzid].forEach(zone => {
console.log('-', zone)
if (zone === tzid) return
const zoneData = getDataSource({ source: 'final', id: zone })
geom = debugGeo('union', geom, zoneData)
})
calculateCb(null, postProcessZone(geom))
},
callback: cb
})
},
err => {
if (err) return callback(err)
cfg.loadZonesInMemoryFn()
tzBoundaryCache.end(callback)
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
makeDerivedTimezoneBoundaries
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function loadFinalZonesIntoMemory () {
console.log('load zones into memory')
Object.keys(zoneCfg).forEach(tzid => {
finalZones[tzid] = getDataSource({ source: 'final', id: tzid })
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
loadFinalZonesIntoMemory
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function loadFinal1970ZonesIntoMemory () {
console.log('load 1970 zones into memory')
Object.keys(zoneCfg1970).forEach(tzid => {
final1970Zones[tzid] = getDataSource({ source: 'final1970', id: tzid })
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
loadFinal1970ZonesIntoMemory
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function loadFinalNowZonesIntoMemory () {
console.log('load Now zones into memory')
Object.keys(zoneCfgNow).forEach(tzid => {
finalNowZones[tzid] = getDataSource({ source: 'finalNow', id: tzid })
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
loadFinalNowZonesIntoMemory
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function roundDownToTenth (n) {
return Math.floor(n * 10) / 10
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
roundDownToTenth
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function roundUpToTenth (n) {
return Math.ceil(n * 10) / 10
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
roundUpToTenth
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function formatBounds (bounds) {
let boundsStr = '['
boundsStr += roundDownToTenth(bounds[0]) + ', '
boundsStr += roundDownToTenth(bounds[1]) + ', '
boundsStr += roundUpToTenth(bounds[2]) + ', '
boundsStr += roundUpToTenth(bounds[3]) + ']'
return boundsStr
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
formatBounds
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
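A worked example of the two rounding helpers feeding formatBounds: minimums are rounded down and maximums up, so the printed box always contains the original bounds.

// bounds are [minX, minY, maxX, maxY]
console.log(formatBounds([10.47, -3.21, 10.53, 4.88]))
// -> '[10.4, -3.3, 10.6, 4.9]'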
function validateTimezoneBoundaries (callback) {
console.log('do validation... this may take a few minutes with fresh data')
// load cache if available
const validationCache = new FileCache({
filename: path.join(cacheDir, 'validation-cache.json')
})
validationCache.init(() => {
let allZonesOk = true
const zones = Object.keys(zoneCfg)
const numZones = Object.keys(zoneCfg).length
const validationProgress = new ProgressStats(
'Validation',
numZones * (numZones + 1) / 2
)
let lastPct = 0
const validationCalcs = []
for (let i = 0; i < zones.length; i++) {
for (let j = i + 1; j < zones.length; j++) {
validationCalcs.push({ tzid: zones[i], compareTzid: zones[j] })
}
}
asynclib.each(
validationCalcs,
({ tzid, compareTzid }, validationCb) => {
const allowedOverlapBounds = expectedZoneOverlaps[`${tzid}-${compareTzid}`] || expectedZoneOverlaps[`${compareTzid}-${tzid}`]
validationCache.calculate({
cacheKey: `${getZoneGeomHash(tzid)}-${getZoneGeomHash(compareTzid)}-${hashMd5(allowedOverlapBounds)}`,
calculateFn: calculateCb => {
const zoneGeom = finalZones[tzid]
const compareZoneGeom = finalZones[compareTzid]
let intersects = false
try {
intersects = debugGeo('intersects', zoneGeom, compareZoneGeom)
} catch (e) {
console.warn('warning, encountered intersection error with zone ' + tzid + ' and ' + compareTzid)
}
if (intersects) {
const intersectedGeom = debugGeo('intersection', zoneGeom, compareZoneGeom)
const intersectedArea = intersectedGeom.getArea()
if (intersectedArea > 0.0001) {
// check if the intersected area(s) are one of the expected areas of overlap
const overlapsGeoJson = geoJsonWriter.write(intersectedGeom)
// these zones are allowed to overlap in certain places, make sure the
// found overlap(s) all fit within the expected areas of overlap
if (allowedOverlapBounds) {
// if the overlaps are a multipolygon, make sure each individual
// polygon of overlap fits within at least one of the expected
// overlaps
let overlapsPolygons
switch (overlapsGeoJson.type) {
case 'MultiPolygon':
overlapsPolygons = overlapsGeoJson.coordinates.map(
polygonCoords => ({
coordinates: polygonCoords,
type: 'Polygon'
})
)
break
case 'Polygon':
overlapsPolygons = [overlapsGeoJson]
break
case 'GeometryCollection':
overlapsPolygons = []
overlapsGeoJson.geometries.forEach(geom => {
if (geom.type === 'Polygon') {
overlapsPolygons.push(geom)
} else if (geom.type === 'MultiPolygon') {
geom.coordinates.forEach(polygonCoords => {
overlapsPolygons.push({
coordinates: polygonCoords,
type: 'Polygon'
})
})
}
})
break
default:
console.error('unexpected geojson overlap type')
console.log(overlapsGeoJson)
break
}
let allOverlapsOk = true
overlapsPolygons.forEach((polygon, idx) => {
const bounds = bbox(polygon)
const polygonArea = area.geometry(polygon)
if (
polygonArea > 10 && // ignore small polygons
!allowedOverlapBounds.some(allowedBounds =>
allowedBounds.bounds[0] <= bounds[0] && // minX
allowedBounds.bounds[1] <= bounds[1] && // minY
allowedBounds.bounds[2] >= bounds[2] && // maxX
allowedBounds.bounds[3] >= bounds[3] // maxY
)
) {
console.error(`Unexpected intersection (${polygonArea} area) with bounds: ${formatBounds(bounds)}`)
allOverlapsOk = false
}
})
if (allOverlapsOk) {
return calculateCb(null, { ok: true })
}
}
// at least one unexpected overlap found, output an error and write debug file
console.error('Validation error: ' + tzid + ' intersects ' + compareTzid + ' area: ' + intersectedArea)
const debugFilename = tzid.replace(/\//g, '-') + '-' + compareTzid.replace(/\//g, '-') + '-overlap.json'
fs.writeFileSync(
debugFilename,
JSON.stringify(overlapsGeoJson)
)
console.error('wrote overlap area as file ' + debugFilename)
console.error('To read more about this error, please visit https://git.io/vx6nx')
return calculateCb(null, {
intersectedArea,
ok: false
})
}
}
calculateCb(null, { ok: true })
},
callback: (err, data) => {
const curPct = Math.floor(validationProgress.getPercentage())
if (curPct % 10 === 0 && curPct !== lastPct) {
validationProgress.printStats('Validating zones', true)
lastPct = curPct
}
validationProgress.logNext()
if (err) return validationCb(err)
if (!data.ok) {
console.error('Validation error: ' + tzid + ' intersects ' + compareTzid)
allZonesOk = false
}
validationCb()
}
})
},
err => {
const error = allZonesOk ? null : new Error('Zone validation unsuccessful')
console.log(allZonesOk ? 'Zones Validated Successfully' : 'Errors found during zone validation')
if (err) {
console.error(err)
}
validationCache.end((err) => callback(err || error))
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
validateTimezoneBoundaries
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
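The expected-overlap check above boils down to a bounding-box containment test. A small sketch, using a hypothetical allowed-overlap entry (the real ones live in expectedZoneOverlaps.json and carry a bounds array per overlap):

const allowedOverlapBounds = [
  { bounds: [34.8, 31.2, 35.6, 32.6] } // [minX, minY, maxX, maxY]
]
// an overlap polygon is acceptable if its bbox fits inside one allowed box
const fitsAllowed = found => allowedOverlapBounds.some(allowed =>
  allowed.bounds[0] <= found[0] && allowed.bounds[1] <= found[1] &&
  allowed.bounds[2] >= found[2] && allowed.bounds[3] >= found[3]
)
console.log(fitsAllowed([35.0, 31.5, 35.4, 32.0])) // true: contained
console.log(fitsAllowed([30.0, 31.5, 35.4, 32.0])) // false: extends past minX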
function addOceans (callback) {
console.log('adding ocean boundaries')
const zones = Object.keys(zoneCfg)
const oceanProgress = new ProgressStats(
'Oceans',
oceanZones.length
)
const oceanBoundaryCache = new FileCache({
filename: path.join(cacheDir, 'ocean-creation-cache.json')
})
oceanBoundaryCache.init(() => {
asynclib.map(
oceanZones,
(oceanZone, zoneCb) => {
oceanProgress.beginTask(oceanZone.tzid, true)
const geoJson = polygon([[
[oceanZone.left, 90],
[oceanZone.left, -90],
[oceanZone.right, -90],
[oceanZone.right, 90],
[oceanZone.left, 90]
]]).geometry
let oceanGeom = geoJsonToGeom(geoJson)
// filter zones to those that have bounds that apply
const tzsInBounds = zones.filter(tzid => {
const zoneEnvelope = finalZones[tzid].getEnvelopeInternal()
return !(
zoneEnvelope.getMaxX() < oceanZone.left ||
zoneEnvelope.getMinX() > oceanZone.right
)
})
oceanBoundaryCache.calculate({
cacheKey: `${oceanZone.tzid}-${hashMd5(tzsInBounds.map(getZoneGeomHash))}`,
calculateFn: calculateCb => {
// diff against applicable zones
tzsInBounds.forEach(finalZone => {
oceanGeom = debugGeo('diff', oceanGeom, finalZones[finalZone])
})
calculateCb(null, {
geom: postProcessZone(oceanGeom, true),
tzid: oceanZone.tzid
})
},
callback: zoneCb
})
},
(err, results) => {
oceanZoneBoundaries = results
oceanBoundaryCache.end(error => callback(err || error))
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
addOceans
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
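Each ocean zone handled above is described by a tzid plus a longitude band; the entry below is illustrative (the actual list is defined earlier in index.js). The zone starts as a full-height rectangle over that band, and only land zones whose envelope touches the band need to be subtracted:

const oceanZone = { tzid: 'Etc/GMT+12', left: -180, right: -172.5 } // illustrative values
// predicate matching the envelope filter used in addOceans
const touchesBand = zoneEnvelope =>
  !(zoneEnvelope.getMaxX() < oceanZone.left || zoneEnvelope.getMinX() > oceanZone.right)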
function combineAndWriteZones (callback) {
const regularWriter = new FeatureWriterStream(workingDir + '/combined.json')
const oceanWriter = new FeatureWriterStream(workingDir + '/combined-with-oceans.json')
let regular1970Writer
let ocean1970Writer
let regularNowWriter
let oceanNowWriter
if (!argv.skip_1970_zones) {
regular1970Writer = new FeatureWriterStream(workingDir + '/combined-1970.json')
ocean1970Writer = new FeatureWriterStream(workingDir + '/combined-with-oceans-1970.json')
}
if (!argv.skip_now_zones) {
regularNowWriter = new FeatureWriterStream(workingDir + '/combined-now.json')
oceanNowWriter = new FeatureWriterStream(workingDir + '/combined-with-oceans-now.json')
}
Object.keys(zoneCfg).forEach(zoneName => {
const feature = {
type: 'Feature',
properties: { tzid: zoneName },
geometry: geomToGeoJson(finalZones[zoneName])
}
const stringified = JSON.stringify(feature)
regularWriter.add(stringified)
oceanWriter.add(stringified)
})
if (!argv.skip_1970_zones) {
Object.keys(zoneCfg1970).forEach(zoneName => {
const feature = {
type: 'Feature',
properties: { tzid: zoneName },
geometry: geomToGeoJson(final1970Zones[zoneName])
}
const stringified = JSON.stringify(feature)
regular1970Writer.add(stringified)
ocean1970Writer.add(stringified)
})
}
if (!argv.skip_now_zones) {
Object.keys(zoneCfgNow).forEach(zoneName => {
const feature = {
type: 'Feature',
properties: { tzid: zoneName },
geometry: geomToGeoJson(finalNowZones[zoneName])
}
const stringified = JSON.stringify(feature)
regularNowWriter.add(stringified)
oceanNowWriter.add(stringified)
})
}
oceanZoneBoundaries.forEach(boundary => {
const feature = {
type: 'Feature',
properties: { tzid: boundary.tzid },
geometry: boundary.geom
}
const stringified = JSON.stringify(feature)
oceanWriter.add(stringified)
if (!argv.skip_1970_zones) {
ocean1970Writer.add(stringified)
}
if (!argv.skip_now_zones) {
oceanNowWriter.add(stringified)
}
})
const writerEnders = [
cb => regularWriter.end(cb),
cb => oceanWriter.end(cb)
]
if (!argv.skip_1970_zones) {
writerEnders.push(cb => regular1970Writer.end(cb))
writerEnders.push(cb => ocean1970Writer.end(cb))
}
if (!argv.skip_now_zones) {
writerEnders.push(cb => regularNowWriter.end(cb))
writerEnders.push(cb => oceanNowWriter.end(cb))
}
asynclib.parallel(writerEnders, callback)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
combineAndWriteZones
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function combineAndWriteOSMZones (callback) {
const osmZoneWriter = new FeatureWriterStream(workingDir + '/combined-osm-zones.json')
Object.keys(zoneCfg).forEach(tzId => {
const tzBoundayName = `${tzId.replaceAll('/', '-')}-tz`
const boundaryFilename = downloadsDir + '/' + tzBoundayName + '.json'
const feature = {
type: 'Feature',
properties: { tzid: tzId },
geometry: require(boundaryFilename)
}
const stringified = JSON.stringify(feature)
osmZoneWriter.add(stringified)
})
osmZoneWriter.end(callback)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
combineAndWriteOSMZones
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function downloadLastRelease (cb) {
// download latest release info
got(
'https://api.github.com/repos/evansiroky/timezone-boundary-builder/releases/latest'
).json()
.then(data => {
// determine last release version name and download link
lastReleaseName = data.name
lastReleaseJSONfile = `${workingDir}/${lastReleaseName}.json`
let lastReleaseDownloadUrl
for (let i = 0; i < data.assets.length; i++) {
if (data.assets[i].browser_download_url.indexOf('timezones.geojson') > -1) {
lastReleaseDownloadUrl = data.assets[i].browser_download_url
}
}
if (!lastReleaseDownloadUrl) {
return cb(new Error('geojson not found'))
}
// check for file that got downloaded
fs.stat(lastReleaseJSONfile, function (err) {
if (!err) {
// file found, skip download steps
return cb()
}
// file not found, download
console.log(`Downloading latest release to ${lastReleaseJSONfile}.zip`)
const pipeline = promisify(stream.pipeline)
pipeline(
got.stream(lastReleaseDownloadUrl),
fs.createWriteStream(`${lastReleaseJSONfile}.zip`)
).then(() => {
// unzip file
console.log(`unzipping latest release from ${lastReleaseJSONfile}.zip`)
exec(
`unzip -o ${lastReleaseJSONfile} -d ${workingDir}`,
err => {
if (err) { return cb(err) }
const srcFile = path.join(workingDir, 'combined.json')
console.log(`unzipped file: ${srcFile}`)
const destFile = lastReleaseJSONfile
console.log(`Renaming ${srcFile} to ${destFile}`)
fs.rename(srcFile, destFile, cb)
}
)
})
})
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
downloadLastRelease
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function zipGeoJsonFiles (cb) {
const zipCommands = [
['timezones.geojson.zip', 'combined.json'],
['timezones-with-oceans.geojson.zip', 'combined-with-oceans.json']
]
if (!argv.skip_1970_zones) {
zipCommands.push(['timezones-1970.geojson.zip', 'combined-1970.json'])
zipCommands.push(['timezones-with-oceans-1970.geojson.zip', 'combined-with-oceans-1970.json'])
}
if (!argv.skip_now_zones) {
zipCommands.push(['timezones-now.geojson.zip', 'combined-now.json'])
zipCommands.push(['timezones-with-oceans-now.geojson.zip', 'combined-with-oceans-now.json'])
}
asynclib.each(
zipCommands.map(([dist, working]) => `zip -j ${path.join(distDir, dist)} ${path.join(workingDir, working)}`),
exec,
cb
)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
zipGeoJsonFiles
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function makeShapefile (config, cb) {
rimraf(config.shapeFileGlob, rimrafErr => {
if (rimrafErr) { return cb(rimrafErr) }
exec(
`ogr2ogr -f "ESRI Shapefile" ${config.shapeFile} ${config.jsonFile}`,
ogrErr => {
if (ogrErr) { return cb(ogrErr) }
if (!config.shapeFileZip) { return cb() }
exec(`zip -j ${config.shapeFileZip} ${config.shapeFileGlob}`, cb)
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
makeShapefile
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function makeShapefiles (cb) {
const shapefileConfigs = [
{ // combined without oceans
jsonFile: path.join(workingDir, 'combined.json'),
shapeFile: path.join(workingDir, 'combined-shapefile.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile.*'),
shapeFileZip: path.join(distDir, 'timezones.shapefile.zip')
}, { // combined with oceans
jsonFile: path.join(workingDir, 'combined-with-oceans.json'),
shapeFile: path.join(workingDir, 'combined-shapefile-with-oceans.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile-with-oceans.*'),
shapeFileZip: path.join(distDir, 'timezones-with-oceans.shapefile.zip')
}
]
if (!argv.skip_1970_zones) {
shapefileConfigs.push({ // 1970 without oceans
jsonFile: path.join(workingDir, 'combined-1970.json'),
shapeFile: path.join(workingDir, 'combined-shapefile-1970.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile-1970.*'),
shapeFileZip: path.join(distDir, 'timezones-1970.shapefile.zip')
})
shapefileConfigs.push({ // 1970 with oceans
jsonFile: path.join(workingDir, 'combined-with-oceans-1970.json'),
shapeFile: path.join(workingDir, 'combined-shapefile-with-oceans-1970.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile-with-oceans-1970.*'),
shapeFileZip: path.join(distDir, 'timezones-with-oceans-1970.shapefile.zip')
})
}
if (!argv.skip_now_zones) {
shapefileConfigs.push({ // now without oceans
jsonFile: path.join(workingDir, 'combined-now.json'),
shapeFile: path.join(workingDir, 'combined-shapefile-now.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile-now.*'),
shapeFileZip: path.join(distDir, 'timezones-now.shapefile.zip')
})
shapefileConfigs.push({ // now with oceans
jsonFile: path.join(workingDir, 'combined-with-oceans-now.json'),
shapeFile: path.join(workingDir, 'combined-shapefile-with-oceans-now.shp'),
shapeFileGlob: path.join(workingDir, 'combined-shapefile-with-oceans-now.*'),
shapeFileZip: path.join(distDir, 'timezones-with-oceans-now.shapefile.zip')
})
}
asynclib.each(shapefileConfigs, makeShapefile, cb)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
makeShapefiles
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function analyzeChangesFromLastRelease (cb) {
// load last release data into memory
console.log('loading previous release into memory')
const lastReleaseData = require(lastReleaseJSONfile)
// load each feature's geojson into JSTS format and then organized by tzid
const lastReleaseZones = {}
lastReleaseData.features.forEach(
feature => {
lastReleaseZones[feature.properties.tzid] = feature
}
)
// generate set of keys from last release and current
const zoneNames = new Set()
Object.keys(finalZones).forEach(zoneName => zoneNames.add(zoneName))
Object.keys(lastReleaseZones).forEach(zoneName => zoneNames.add(zoneName))
// create diff for each zone
const analysisProgress = new ProgressStats(
'Analyzing diffs',
zoneNames.size
)
const additionsWriter = new FeatureWriterStream(workingDir + '/additions.json')
const removalsWriter = new FeatureWriterStream(workingDir + '/removals.json')
const analysisCache = new FileLookupCache({
filename: path.join(cacheDir, 'last-release-diffs-analysis-cache.json')
})
analysisCache.init(() => {
asynclib.each(
zoneNames,
(zoneName, zoneCb) => {
analysisCache.calculate({
cacheKey: `${lastReleaseName}-${zoneName}-${getZoneGeomHash(zoneName)}`,
outputFilename: path.join(cacheDir, 'last-release-diffs', safeTzFilename(zoneName)),
calculateFn: calculateCb => {
console.log(`Analyzing diffs from last release for ${zoneName}`)
const lastReleaseZone = lastReleaseZones[zoneName]
const finalZone = finalZones[zoneName]
const results = {}
if (finalZone && lastReleaseZone) {
// some zones take forever to diff unless they are buffered, so buffer by
// just a small amount
const lastReleaseGeom = geoJsonToGeom(
lastReleaseZone.geometry
).buffer(bufferDistance)
const curDataGeom = finalZone.buffer(bufferDistance)
// don't diff equal geometries
if (!curDataGeom.equals(lastReleaseGeom)) {
// diff current - last = additions
const addition = debugGeo(
'diff',
curDataGeom,
lastReleaseGeom,
false,
true
)
if (addition.getArea() > 0.0001) {
results.addition = geomToGeoJson(addition)
}
// diff last - current = removals
const removal = debugGeo(
'diff',
lastReleaseGeom,
curDataGeom,
false,
true
)
if (removal.getArea() > 0.0001) {
results.removal = geomToGeoJson(removal)
}
}
} else if (finalZone) {
results.addition = geomToGeoJson(finalZone)
} else {
results.removal = lastReleaseZone
}
calculateCb(null, JSON.stringify(results))
},
callback: (err, results) => {
analysisProgress.beginTask(zoneName, true)
if (err) return zoneCb(err)
if (results.addition) {
additionsWriter.add(JSON.stringify({
type: 'Feature',
properties: { tzid: zoneName },
geometry: results.addition
}))
}
if (results.removal) {
removalsWriter.add(JSON.stringify({
type: 'Feature',
properties: { tzid: zoneName },
geometry: results.removal
}))
}
zoneCb()
},
returnFile: true
})
},
err => {
if (err) return cb(err)
// write files and close cache
asynclib.parallel([
wcb => additionsWriter.end(wcb),
wcb => removalsWriter.end(wcb),
ccb => analysisCache.end(ccb)
], cb)
}
)
})
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
analyzeChangesFromLastRelease
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
function assembleAndZipInputData (callback) {
// since lots of files are at absolute paths, assemble a temporary folder with needed files
const tempInputFilesDir = path.join(workingDir, 'input-data')
asynclib.series(
[
// remove previous folder
cb => fs.rm(tempInputFilesDir, { recursive: true }, cb),
// create it again
cb => fs.mkdir(tempInputFilesDir, cb),
// copy necessary files
copyCb => {
asynclib.parallel(
[
// downloads
cb => fs.cp(
downloadsDir,
path.join(tempInputFilesDir, 'downloads'),
{ recursive: true },
cb
),
// cache
cb => fs.cp(
cacheDir,
path.join(tempInputFilesDir, 'cache'),
{ recursive: true },
cb
),
// etc single files (assumes cwd is repo root)
cb => fs.cp('timezones.json', path.join(tempInputFilesDir, 'timezones.json'), cb),
cb => fs.cp('osmBoundarySources.json', path.join(tempInputFilesDir, 'osmBoundarySources.json'), cb),
cb => fs.cp('expectedZoneOverlaps.json', path.join(tempInputFilesDir, 'expectedZoneOverlaps.json'), cb)
],
copyCb
)
},
// zip up
cb => {
const zipFilepath = path.join(distDir, 'input-data.zip')
exec(
`zip -r ${zipFilepath} input-data`,
{ cwd: workingDir },
cb
)
}
],
callback
)
}
|
Post process created timezone boundary.
- remove small holes and exclaves
- reduce geometry precision
@param {Geometry} geom The jsts geometry of the timezone
@param {boolean} returnAsObject if true, return as object, otherwise return stringified
@return {Object|String} geojson as object or stringified
|
assembleAndZipInputData
|
javascript
|
evansiroky/timezone-boundary-builder
|
index.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/index.js
|
MIT
|
beginTask (message, logTimeLeft) {
this.printStats(message, logTimeLeft)
this.logNext()
}
|
Begin a new task. Print the current progress and then increment the number of tasks.
@param {string} message A short message about the current task progress
@param {boolean} [logTimeLeft] whether or not to log the time left.
|
beginTask
|
javascript
|
evansiroky/timezone-boundary-builder
|
util/progressStats.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/util/progressStats.js
|
MIT
|
printStats (message, logTimeLeft) {
message = `${message}; ${this.trackerName} progress: ${this.getPercentage()}% done`
if (logTimeLeft) {
message = `${message} - ${this.getTimeLeft()} left`
}
console.log(message)
}
|
Print the current progress.
@param {string} message A short message about the current task progress
@param {boolean} [logTimeLeft] whether or not to log the time left.
|
printStats
|
javascript
|
evansiroky/timezone-boundary-builder
|
util/progressStats.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/util/progressStats.js
|
MIT
|
getPercentage () {
const current = (this.taskCounter / this.totalTasks)
return Math.round(current * 1000.0) / 10.0
}
|
calculates the percentage of finished tasks
@returns {number} the percentage, rounded to one decimal place
|
getPercentage
|
javascript
|
evansiroky/timezone-boundary-builder
|
util/progressStats.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/util/progressStats.js
|
MIT
|
getTimeLeft () {
if (this.taskCounter === 0) return '?'
const averageTimePerTask = (Date.now() - this.beginTime.getTime()) / this.taskCounter
const tasksLeft = this.totalTasks - this.taskCounter
const millisecondsLeft = averageTimePerTask * tasksLeft
return this.formatMilliseconds(millisecondsLeft)
}
|
calculates the time left and outputs it in human-readable format;
the calculation is based on the average time per task so far
@returns {string}
|
getTimeLeft
|
javascript
|
evansiroky/timezone-boundary-builder
|
util/progressStats.js
|
https://github.com/evansiroky/timezone-boundary-builder/blob/master/util/progressStats.js
|
MIT
|
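A rough worked example of the estimate above, with illustrative numbers:

// 30 of 120 tasks finished after 60 seconds
const elapsedMs = 60000
const taskCounter = 30
const totalTasks = 120
const averageTimePerTask = elapsedMs / taskCounter                        // 2000 ms per task
const millisecondsLeft = averageTimePerTask * (totalTasks - taskCounter)  // 180000 ms
console.log(millisecondsLeft / 1000, 'seconds left')                      // ~180 seconds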