code | docstring | func_name | language | repo | path | url | license |
---|---|---|---|---|---|---|---|
stringlengths 24-2.07M | stringlengths 25-85.3k | stringlengths 1-92 | stringclasses 1 value | stringlengths 5-64 | stringlengths 4-172 | stringlengths 44-218 | stringclasses 7 values |
function queueCompletedSegment(boundary, segment) {
if (segment.chunks.length === 0 && segment.children.length === 1 && segment.children[0].boundary === null) {
var childSegment = segment.children[0];
childSegment.id = segment.id;
childSegment.parentFlushed = true;
if (childSegment.status === COMPLETED) {
queueCompletedSegment(boundary, childSegment);
}
} else {
var completedSegments = boundary.completedSegments;
completedSegments.push(segment);
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
queueCompletedSegment
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function finishedTask(request, boundary, segment) {
if (boundary === null) {
if (segment.parentFlushed) {
if (request.completedRootSegment !== null) {
throw new Error("There can only be one root segment. This is a bug in React.");
}
request.completedRootSegment = segment;
}
request.pendingRootTasks--;
if (request.pendingRootTasks === 0) {
request.onShellError = noop$1;
var onShellReady = request.onShellReady;
onShellReady();
}
} else {
boundary.pendingTasks--;
if (boundary.forceClientRender) {
// The boundary errored and will be client-rendered; nothing to queue for it here.
}
else if (boundary.pendingTasks === 0) {
if (segment.parentFlushed) {
if (segment.status === COMPLETED) {
queueCompletedSegment(boundary, segment);
}
}
if (boundary.parentFlushed) {
request.completedBoundaries.push(boundary);
}
boundary.fallbackAbortableTasks.forEach(abortTaskSoft, request);
boundary.fallbackAbortableTasks.clear();
} else {
if (segment.parentFlushed) {
if (segment.status === COMPLETED) {
queueCompletedSegment(boundary, segment);
var completedSegments = boundary.completedSegments;
if (completedSegments.length === 1) {
if (boundary.parentFlushed) {
request.partialBoundaries.push(boundary);
}
}
}
}
}
}
request.allPendingTasks--;
if (request.allPendingTasks === 0) {
var onAllReady = request.onAllReady;
onAllReady();
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
finishedTask
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function retryTask(request, task) {
var segment = task.blockedSegment;
if (segment.status !== PENDING) {
return;
}
switchContext(task.context);
var prevTaskInDEV = null;
{
prevTaskInDEV = currentTaskInDEV;
currentTaskInDEV = task;
}
try {
renderNodeDestructive(request, task, task.node);
pushSegmentFinale(segment.chunks, request.responseState, segment.lastPushedText, segment.textEmbedded);
task.abortSet.delete(task);
segment.status = COMPLETED;
finishedTask(request, task.blockedBoundary, segment);
} catch (x) {
resetHooksState();
if (typeof x === "object" && x !== null && typeof x.then === "function") {
var ping = task.ping;
x.then(ping, ping);
} else {
task.abortSet.delete(task);
segment.status = ERRORED;
erroredTask(request, task.blockedBoundary, segment, x);
}
} finally {
{
currentTaskInDEV = prevTaskInDEV;
}
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
retryTask
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function performWork(request) {
if (request.status === CLOSED) {
return;
}
var prevContext = getActiveContext();
var prevDispatcher = ReactCurrentDispatcher$1.current;
ReactCurrentDispatcher$1.current = Dispatcher;
var prevGetCurrentStackImpl;
{
prevGetCurrentStackImpl = ReactDebugCurrentFrame$1.getCurrentStack;
ReactDebugCurrentFrame$1.getCurrentStack = getCurrentStackInDEV;
}
var prevResponseState = currentResponseState;
setCurrentResponseState(request.responseState);
try {
var pingedTasks = request.pingedTasks;
var i;
for (i = 0; i < pingedTasks.length; i++) {
var task = pingedTasks[i];
retryTask(request, task);
}
pingedTasks.splice(0, i);
if (request.destination !== null) {
flushCompletedQueues(request, request.destination);
}
} catch (error2) {
logRecoverableError(request, error2);
fatalError(request, error2);
} finally {
setCurrentResponseState(prevResponseState);
ReactCurrentDispatcher$1.current = prevDispatcher;
{
ReactDebugCurrentFrame$1.getCurrentStack = prevGetCurrentStackImpl;
}
if (prevDispatcher === Dispatcher) {
switchContext(prevContext);
}
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
performWork
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushSubtree(request, destination, segment) {
segment.parentFlushed = true;
switch (segment.status) {
case PENDING: {
var segmentID = segment.id = request.nextSegmentId++;
segment.lastPushedText = false;
segment.textEmbedded = false;
return writePlaceholder(destination, request.responseState, segmentID);
}
case COMPLETED: {
segment.status = FLUSHED;
var r = true;
var chunks = segment.chunks;
var chunkIdx = 0;
var children = segment.children;
for (var childIdx = 0; childIdx < children.length; childIdx++) {
var nextChild = children[childIdx];
for (; chunkIdx < nextChild.index; chunkIdx++) {
writeChunk(destination, chunks[chunkIdx]);
}
r = flushSegment(request, destination, nextChild);
}
for (; chunkIdx < chunks.length - 1; chunkIdx++) {
writeChunk(destination, chunks[chunkIdx]);
}
if (chunkIdx < chunks.length) {
r = writeChunkAndReturn(destination, chunks[chunkIdx]);
}
return r;
}
default: {
throw new Error("Aborted, errored or already flushed boundaries should not be flushed again. This is a bug in React.");
}
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushSubtree
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushSegment(request, destination, segment) {
var boundary = segment.boundary;
if (boundary === null) {
return flushSubtree(request, destination, segment);
}
boundary.parentFlushed = true;
if (boundary.forceClientRender) {
writeStartClientRenderedSuspenseBoundary(destination, request.responseState, boundary.errorDigest, boundary.errorMessage, boundary.errorComponentStack);
flushSubtree(request, destination, segment);
return writeEndClientRenderedSuspenseBoundary(destination, request.responseState);
} else if (boundary.pendingTasks > 0) {
boundary.rootSegmentID = request.nextSegmentId++;
if (boundary.completedSegments.length > 0) {
request.partialBoundaries.push(boundary);
}
var id = boundary.id = assignSuspenseBoundaryID(request.responseState);
writeStartPendingSuspenseBoundary(destination, request.responseState, id);
flushSubtree(request, destination, segment);
return writeEndPendingSuspenseBoundary(destination, request.responseState);
} else if (boundary.byteSize > request.progressiveChunkSize) {
boundary.rootSegmentID = request.nextSegmentId++;
request.completedBoundaries.push(boundary);
writeStartPendingSuspenseBoundary(destination, request.responseState, boundary.id);
flushSubtree(request, destination, segment);
return writeEndPendingSuspenseBoundary(destination, request.responseState);
} else {
writeStartCompletedSuspenseBoundary(destination, request.responseState);
var completedSegments = boundary.completedSegments;
if (completedSegments.length !== 1) {
throw new Error("A previously unvisited boundary must have exactly one root segment. This is a bug in React.");
}
var contentSegment = completedSegments[0];
flushSegment(request, destination, contentSegment);
return writeEndCompletedSuspenseBoundary(destination, request.responseState);
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushSegment
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushClientRenderedBoundary(request, destination, boundary) {
return writeClientRenderBoundaryInstruction(destination, request.responseState, boundary.id, boundary.errorDigest, boundary.errorMessage, boundary.errorComponentStack);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushClientRenderedBoundary
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushSegmentContainer(request, destination, segment) {
writeStartSegment(destination, request.responseState, segment.formatContext, segment.id);
flushSegment(request, destination, segment);
return writeEndSegment(destination, segment.formatContext);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushSegmentContainer
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushCompletedBoundary(request, destination, boundary) {
var completedSegments = boundary.completedSegments;
var i = 0;
for (; i < completedSegments.length; i++) {
var segment = completedSegments[i];
flushPartiallyCompletedSegment(request, destination, boundary, segment);
}
completedSegments.length = 0;
return writeCompletedBoundaryInstruction(destination, request.responseState, boundary.id, boundary.rootSegmentID);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushCompletedBoundary
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushPartialBoundary(request, destination, boundary) {
var completedSegments = boundary.completedSegments;
var i = 0;
for (; i < completedSegments.length; i++) {
var segment = completedSegments[i];
if (!flushPartiallyCompletedSegment(request, destination, boundary, segment)) {
i++;
completedSegments.splice(0, i);
return false;
}
}
completedSegments.splice(0, i);
return true;
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushPartialBoundary
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushPartiallyCompletedSegment(request, destination, boundary, segment) {
if (segment.status === FLUSHED) {
return true;
}
var segmentID = segment.id;
if (segmentID === -1) {
var rootSegmentID = segment.id = boundary.rootSegmentID;
if (rootSegmentID === -1) {
throw new Error("A root segment ID must have been assigned by now. This is a bug in React.");
}
return flushSegmentContainer(request, destination, segment);
} else {
flushSegmentContainer(request, destination, segment);
return writeCompletedSegmentInstruction(destination, request.responseState, segmentID);
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushPartiallyCompletedSegment
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function flushCompletedQueues(request, destination) {
beginWriting();
try {
var completedRootSegment = request.completedRootSegment;
if (completedRootSegment !== null && request.pendingRootTasks === 0) {
flushSegment(request, destination, completedRootSegment);
request.completedRootSegment = null;
writeCompletedRoot(destination, request.responseState);
}
var clientRenderedBoundaries = request.clientRenderedBoundaries;
var i;
for (i = 0; i < clientRenderedBoundaries.length; i++) {
var boundary = clientRenderedBoundaries[i];
if (!flushClientRenderedBoundary(request, destination, boundary)) {
request.destination = null;
i++;
clientRenderedBoundaries.splice(0, i);
return;
}
}
clientRenderedBoundaries.splice(0, i);
var completedBoundaries = request.completedBoundaries;
for (i = 0; i < completedBoundaries.length; i++) {
var _boundary = completedBoundaries[i];
if (!flushCompletedBoundary(request, destination, _boundary)) {
request.destination = null;
i++;
completedBoundaries.splice(0, i);
return;
}
}
completedBoundaries.splice(0, i);
completeWriting(destination);
beginWriting(destination);
var partialBoundaries = request.partialBoundaries;
for (i = 0; i < partialBoundaries.length; i++) {
var _boundary2 = partialBoundaries[i];
if (!flushPartialBoundary(request, destination, _boundary2)) {
request.destination = null;
i++;
partialBoundaries.splice(0, i);
return;
}
}
partialBoundaries.splice(0, i);
var largeBoundaries = request.completedBoundaries;
for (i = 0; i < largeBoundaries.length; i++) {
var _boundary3 = largeBoundaries[i];
if (!flushCompletedBoundary(request, destination, _boundary3)) {
request.destination = null;
i++;
largeBoundaries.splice(0, i);
return;
}
}
largeBoundaries.splice(0, i);
} finally {
completeWriting(destination);
if (request.allPendingTasks === 0 && request.pingedTasks.length === 0 && request.clientRenderedBoundaries.length === 0 && request.completedBoundaries.length === 0) {
{
if (request.abortableTasks.size !== 0) {
error("There was still abortable task at the root when we closed. This is a bug in React.");
}
}
close(destination);
}
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
flushCompletedQueues
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function startWork(request) {
scheduleWork(function() {
return performWork(request);
});
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
startWork
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function startFlowing(request, destination) {
if (request.status === CLOSING) {
request.status = CLOSED;
closeWithError(destination, request.fatalError);
return;
}
if (request.status === CLOSED) {
return;
}
if (request.destination !== null) {
return;
}
request.destination = destination;
try {
flushCompletedQueues(request, destination);
} catch (error2) {
logRecoverableError(request, error2);
fatalError(request, error2);
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
startFlowing
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function abort(request, reason) {
try {
var abortableTasks = request.abortableTasks;
abortableTasks.forEach(function(task) {
return abortTask(task, request, reason);
});
abortableTasks.clear();
if (request.destination !== null) {
flushCompletedQueues(request, request.destination);
}
} catch (error2) {
logRecoverableError(request, error2);
fatalError(request, error2);
}
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
abort
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function renderToReadableStream(children, options) {
return new Promise(function(resolve, reject) {
var onFatalError;
var onAllReady;
var allReady = new Promise(function(res, rej) {
onAllReady = res;
onFatalError = rej;
});
function onShellReady() {
var stream = new ReadableStream(
{
type: "bytes",
pull: function(controller) {
startFlowing(request, controller);
},
cancel: function(reason) {
abort(request);
}
},
// $FlowFixMe size() methods are not allowed on byte streams.
{
highWaterMark: 0
}
);
stream.allReady = allReady;
resolve(stream);
}
function onShellError(error2) {
allReady.catch(function() {
});
reject(error2);
}
var request = createRequest(children, createResponseState(options ? options.identifierPrefix : void 0, options ? options.nonce : void 0, options ? options.bootstrapScriptContent : void 0, options ? options.bootstrapScripts : void 0, options ? options.bootstrapModules : void 0), createRootFormatContext(options ? options.namespaceURI : void 0), options ? options.progressiveChunkSize : void 0, options ? options.onError : void 0, onAllReady, onShellReady, onShellError, onFatalError);
if (options && options.signal) {
var signal = options.signal;
var listener = function() {
abort(request, signal.reason);
signal.removeEventListener("abort", listener);
};
signal.addEventListener("abort", listener);
}
startWork(request);
});
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
renderToReadableStream
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
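For orientation, a minimal sketch of how the `renderToReadableStream` defined above is typically consumed from a fetch-style handler. The handler name and the `AbortController` wiring are assumptions; `import_react2` and `app_default` reuse identifiers that appear later in this file.
// Hedged usage sketch: render the app element to a byte stream and return it
// as an HTML response. onError is invoked for recoverable errors during streaming.
async function handleSSR(request) {
  const controller = new AbortController();
  const stream = await renderToReadableStream(
    import_react2.default.createElement(app_default, null),
    {
      signal: controller.signal,
      onError(error) {
        console.error(error);
      }
    }
  );
  return new Response(stream, {
    status: 200,
    headers: { "Content-Type": "text/html; charset=utf-8" }
  });
}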
function onShellReady() {
var stream = new ReadableStream(
{
type: "bytes",
pull: function(controller) {
startFlowing(request, controller);
},
cancel: function(reason) {
abort(request);
}
},
// $FlowFixMe size() methods are not allowed on byte streams.
{
highWaterMark: 0
}
);
stream.allReady = allReady;
resolve(stream);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
onShellReady
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
function onShellError(error2) {
allReady.catch(function() {
});
reject(error2);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
onShellError
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
listener = function() {
abort(request, signal.reason);
signal.removeEventListener("abort", listener);
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
listener
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
constructHTML = (app) => {
return `
<!doctype html>
<html>
<body>
<div id="content">${app}</div>
</body>
</html>
`;
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
constructHTML
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
handleRequest = async (request) => {
const headers = { "Content-Type": "text/html; charset=utf-8" };
const app = import_server.default.renderToString(/* @__PURE__ */ import_react2.default.createElement(app_default, null));
const html = constructHTML(app);
return new Response(html, { status: 200, headers });
}
|
Sets a subset of the state. This only exists because _pendingState is
internal. This provides a merging strategy that is not available to deep
properties which is confusing. TODO: Expose pendingState or don't use it
during the merge.
@param {ReactClass} publicInstance The instance that should rerender.
@param {object} partialState Next partial state to be merged with state.
@param {?function} callback Called after component is updated.
@param {?string} Name of the calling function in the public API.
@internal
|
handleRequest
|
javascript
|
wasmerio/winterjs
|
tests/complex.js
|
https://github.com/wasmerio/winterjs/blob/master/tests/complex.js
|
MIT
|
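A hedged sketch of how a handler like `handleRequest` is usually registered in a service-worker-style runtime such as WinterJS; the exact registration call in this test file is not shown in the excerpt above, so treat it as an assumption.
// Assumed wiring: respond to incoming fetch events with the SSR handler.
addEventListener("fetch", (event) => {
  event.respondWith(handleRequest(event.request));
});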
function combineBypassData(generator, bypassArr, plopArgV) {
// skip bypass if prompts is a function
if (typeof generator.prompts === "function") {
return [];
}
// Get named prompts that are passed to the command line
const promptNames = generator.prompts.map((prompt) => prompt.name);
// Check if bypassArr is too long for promptNames
if (bypassArr.length > promptNames.length) {
console.error(
chalk.red("[PLOP] ") +
'Too many bypass arguments passed for "' +
generator.name +
'"',
);
out.getHelpMessage(generator);
process.exit(1);
}
let namedBypassArr = [];
if (Object.keys(plopArgV).length > 0) {
// Let's make sure we made no whoopsy-poos (AKA passing incorrect inputs)
let errors = false;
Object.keys(plopArgV).forEach((arg) => {
if (!promptNames.find((name) => name === arg) && arg !== "_") {
console.error(
chalk.red("[PLOP] ") +
'"' +
arg +
'"' +
' is an invalid argument for "' +
generator.name +
'"',
);
errors = true;
}
});
if (errors) {
out.getHelpMessage(generator);
process.exit(1);
}
namedBypassArr = promptNames.map((name) =>
plopArgV[name] !== undefined ? plopArgV[name] : undefined,
);
}
// merge the bypass data with named bypass values
const mergedBypass = mergeArrays(bypassArr, namedBypassArr);
// clean up `undefined` values
return mergedBypass.map((v) => (v === undefined ? "_" : v));
}
|
Combine different types of bypass data
@param generator - The generator object involved
@param bypassArr - The array of overwritten properties
@param plopArgV - The original args passed to plop without using names
|
combineBypassData
|
javascript
|
plopjs/plop
|
packages/plop/src/bypass.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/bypass.js
|
MIT
|
function mergeArrays(baseArr, overlay) {
const length = Math.max(baseArr.length, overlay.length);
return new Array(length)
.fill()
.map((v, i) => (overlay[i] !== undefined ? overlay[i] : baseArr[i]));
}
|
Combine different types of bypass data
@param generator - The generator object involved
@param bypassArr - The array of overwritten properties
@param plopArgV - The original args passed to plop without using names
|
mergeArrays
|
javascript
|
plopjs/plop
|
packages/plop/src/bypass.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/bypass.js
|
MIT
|
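A small worked example (hypothetical values) to make the overlay semantics of `mergeArrays` concrete; in `combineBypassData` the base array is the positional bypass input and the overlay is the named-argument array, so named values win wherever they are defined.
// Overlay entries replace base entries only where the overlay is defined.
mergeArrays(["Button", undefined, "css"], [undefined, "atoms"]);
// -> ["Button", "atoms", "css"]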
function getBypassAndGenerator(plop, passArgsBeforeDashes) {
// See if there are args to pass to generator
const eoaIndex = args.indexOf("--");
const { plopArgV, eoaArg } = passArgsBeforeDashes
? { plopArgV: argv }
: eoaIndex === -1
? { plopArgV: [] }
: {
plopArgV: minimist(args.slice(eoaIndex + 1, args.length)),
eoaArg: args[eoaIndex + 1],
};
// locate the generator name based on input and take the rest of the
// user's input as prompt bypass data to be passed into the generator
let generatorName = "";
let bypassArr = [];
const generatorNames = plop.getGeneratorList().map((v) => v.name);
for (let i = 0; i < argv._.length; i++) {
const nameTest =
(generatorName.length ? generatorName + " " : "") + argv._[i];
if (listHasOptionThatStartsWith(generatorNames, nameTest)) {
generatorName = nameTest;
} else {
let index = argv._.findIndex((arg) => arg === eoaArg);
// If can't find index, slice until the very end - allowing all `_` to be passed
index = index !== -1 ? index : argv._.length;
// Force `'_'` to become undefined in nameless bypassArr
bypassArr = argv._.slice(i, index).map((arg) =>
/^_+$/.test(arg) ? undefined : arg,
);
break;
}
}
return { generatorName, bypassArr, plopArgV };
}
|
Parses the user input to identify the generator to run and any bypass data
@param plop - The plop context
@param passArgsBeforeDashes - Should we pass args before `--` to the generator API
|
getBypassAndGenerator
|
javascript
|
plopjs/plop
|
packages/plop/src/input-processing.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/input-processing.js
|
MIT
|
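To illustrate the parsing above, here is a sketch of what a hypothetical invocation would produce, assuming a plopfile that defines a generator named `component` and no `--` separator on the command line.
// $ plop component MyButton _
// argv._        -> ["component", "MyButton", "_"]
// generatorName -> "component"              (longest prefix that matches a generator name)
// bypassArr     -> ["MyButton", undefined]  (a bare "_" is forced to undefined)
// plopArgV      -> []                       (no "--" was passed and passArgsBeforeDashes is falsy)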
function listHasOptionThatStartsWith(list, prefix) {
return list.some(function (txt) {
return txt.indexOf(prefix) === 0;
});
}
|
Parses the user input to identify the generator to run and any bypass data
@param plop - The plop context
@param passArgsBeforeDashes - Should we pass args before `--` to the generator API
|
listHasOptionThatStartsWith
|
javascript
|
plopjs/plop
|
packages/plop/src/input-processing.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/input-processing.js
|
MIT
|
function handleArgFlags(env) {
// Make sure that we're not overwriting `help`, `init,` or `version` args in generators
if (argv._.length === 0) {
// handle request for usage and options
if (argv.help || argv.h) {
out.displayHelpScreen();
process.exit(0);
}
// handle request for initializing a new plopfile
if (argv.init || argv.i || argv[`init-ts`]) {
const force = argv.force === true || argv.f === true || false;
try {
out.createInitPlopfile(force, !!argv[`init-ts`]);
process.exit(0);
} catch (err) {
console.error(chalk.red("[PLOP] ") + err.message);
process.exit(1);
}
}
// handle request for version number
if (argv.version || argv.v) {
const localVersion = env.modulePackage.version;
if (localVersion !== globalPkg.version && localVersion != null) {
console.log(chalk.yellow("CLI version"), globalPkg.version);
console.log(chalk.yellow("Local version"), localVersion);
} else {
console.log(globalPkg.version);
}
process.exit(0);
}
}
// abort if there's no plopfile found
if (env.configPath == null) {
console.error(chalk.red("[PLOP] ") + "No plopfile found");
out.displayHelpScreen();
process.exit(1);
}
}
|
Handles all basic argument flags
@param env - Values parsed by Liftoff
|
handleArgFlags
|
javascript
|
plopjs/plop
|
packages/plop/src/input-processing.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/input-processing.js
|
MIT
|
async function run(env, _, passArgsBeforeDashes) {
const plopfilePath = env.configPath;
// handle basic argument flags like --help, --version, etc
handleArgFlags(env);
// use base path from argv or env if any is present, otherwise set it to the plopfile directory
const destBasePath = argv.dest || env.dest;
let plop;
try {
plop = await nodePlop(plopfilePath, {
destBasePath: destBasePath ? path.resolve(destBasePath) : undefined,
force: argv.force === true || argv.f === true || false,
});
} catch (e) {
console.error(
chalk.red("[PLOP] ") + "Something went wrong with reading your plop file",
e,
);
return;
}
const generators = plop.getGeneratorList();
const generatorNames = generators.map((v) => v.name);
const { generatorName, bypassArr, plopArgV } = getBypassAndGenerator(
plop,
passArgsBeforeDashes,
);
// look up a generator and run it with calculated bypass data
const runGeneratorByName = (name) => {
const generator = plop.getGenerator(name);
const bypassData = combineBypassData(generator, bypassArr, plopArgV);
doThePlop(generator, bypassData);
};
// hmmmm, couldn't identify a generator in the user's input
if (!generators.length) {
// no generators?! there's clearly something wrong here
console.error(chalk.red("[PLOP] ") + "No generator found in plopfile");
process.exit(1);
} else if (!generatorName && generators.length === 1) {
// only one generator in this plopfile... let's assume they
// want to run that one!
runGeneratorByName(generatorNames[0]);
} else if (!generatorName && generators.length > 1 && !bypassArr.length) {
// more than one generator? we'll have to ask the user which
// one they want to run.
out
.chooseOptionFromList(generators, plop.getWelcomeMessage())
.then(runGeneratorByName)
.catch((err) => {
console.error(
chalk.red("[PLOP] ") +
"Something went wrong with selecting a generator",
err,
);
});
} else if (generatorNames.includes(generatorName)) {
// we have found the generator, run it!
runGeneratorByName(generatorName);
} else {
// we just can't make sense of your input... sorry :-(
const fuzzyGenName = (generatorName + " " + args.join(" ")).trim();
console.error(
chalk.red("[PLOP] ") +
'Could not find a generator for "' +
fuzzyGenName +
'"',
);
process.exit(1);
}
return plop;
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
run
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
runGeneratorByName = (name) => {
const generator = plop.getGenerator(name);
const bypassData = combineBypassData(generator, bypassArr, plopArgV);
doThePlop(generator, bypassData);
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
runGeneratorByName
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
function doThePlop(generator, bypassArr) {
let failedActions = false;
generator
.runPrompts(bypassArr)
.then(async (answers) => {
return answers;
})
.then((answers) => {
const noMap = argv["show-type-names"] || argv.t;
const onComment = (msg) => {
progressSpinner.info(msg);
progressSpinner.start();
};
const onSuccess = (change) => {
let line = "";
if (change.type) {
line += ` ${out.typeMap(change.type, noMap)}`;
}
if (change.path) {
line += ` ${change.path}`;
}
progressSpinner.succeed(line);
progressSpinner.start();
};
const onFailure = (fail) => {
let line = "";
if (fail.type) {
line += ` ${out.typeMap(fail.type, noMap)}`;
}
if (fail.path) {
line += ` ${fail.path}`;
}
const errMsg = fail.error || fail.message;
if (errMsg) {
line += ` ${errMsg}`;
}
progressSpinner.fail(line);
failedActions = true;
progressSpinner.start();
};
progressSpinner.start();
return generator
.runActions(answers, { onSuccess, onFailure, onComment })
.then(() => {
progressSpinner.stop();
if (failedActions) process.exit(1);
});
})
.catch(function (err) {
console.error(chalk.red("[ERROR]"), err.message);
process.exit(1);
});
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
doThePlop
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
onComment = (msg) => {
progressSpinner.info(msg);
progressSpinner.start();
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
onComment
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
onSuccess = (change) => {
let line = "";
if (change.type) {
line += ` ${out.typeMap(change.type, noMap)}`;
}
if (change.path) {
line += ` ${change.path}`;
}
progressSpinner.succeed(line);
progressSpinner.start();
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
onSuccess
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
onFailure = (fail) => {
let line = "";
if (fail.type) {
line += ` ${out.typeMap(fail.type, noMap)}`;
}
if (fail.path) {
line += ` ${fail.path}`;
}
const errMsg = fail.error || fail.message;
if (errMsg) {
line += ` ${errMsg}`;
}
progressSpinner.fail(line);
failedActions = true;
progressSpinner.start();
}
|
The function to pass as the second argument to `Plop.execute`
@param env - This is passed implicitly
@param _ - Passed implicitly. Not needed, but allows for `passArgsBeforeDashes` to be explicitly passed
@param passArgsBeforeDashes - An opt-in `true` boolean that will allow merging of plop CLI API and generator API
@example
Plop.execute(env => run(env, undefined, true))
!!!!!! WARNING !!!!!!
One of the reasons we default generator arguments as anything past `--` is a few reasons:
Primarily that there may be name-spacing issues when combining the arg order and named arg passing
|
onFailure
|
javascript
|
plopjs/plop
|
packages/plop/src/plop.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/src/plop.js
|
MIT
|
function renderScript(script, args = [], opts = {}) {
const { cwd = __dirname } = opts;
return render(
resolve(__dirname, "../node_modules/.bin/nyc"),
["--silent", "node", script, ...args],
{
cwd,
spawnOpts: {
env: { ...process.env, NODE_ENV: "test" },
},
},
);
}
|
@param {String} script
@param {Array} args
@param {Object} opts
|
renderScript
|
javascript
|
plopjs/plop
|
packages/plop/tests/render.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/tests/render.js
|
MIT
|
function renderPlop(args = [], opts = {}) {
return renderScript(
resolve(__dirname, "../instrumented/bin/plop.js"),
args,
opts,
);
}
|
@param {Array} args
@param {Object} opts
|
renderPlop
|
javascript
|
plopjs/plop
|
packages/plop/tests/render.js
|
https://github.com/plopjs/plop/blob/master/packages/plop/tests/render.js
|
MIT
|
RollerProvider = function(_mongoose, _TaskMdl) {
mongoose = _mongoose;
TaskMdl = _TaskMdl;
dbUtils = require("./utils/db.utils")(mongoose);
getModel = dbUtils.getModel;
getCollectionForStep = dbUtils.getCollectionForStep;
return Roller;
}
|
Initializes variables and gets the roller.
@param _mongoose the mongoose instance
@param _TaskMdl the task model
@returns An object containing rollback functions
@constructor
|
RollerProvider
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
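For context, a hedged sketch of how the roller is reached through Fawn's public API, typically to roll back tasks left incomplete by a crash before the application starts serving traffic; the surrounding initialization is assumed rather than shown in this file.
// Assumed startup wiring: initialize Fawn, then roll back any unfinished tasks.
var Fawn = require("fawn");
Fawn.init(mongoose);
Fawn.Roller.roll().then(function () {
  // all incomplete tasks have been rolled back; safe to begin handling requests
});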
function rollBackTask(task) {
var db = mongoose.connection.db;
var chain = Promise.resolve();
var lastIndex = task.steps.length - 1;
var firstStep = task.steps[0];
var lastStep = task.steps[lastIndex];
var step;
if (lastStep.state !== DONE && firstStep.state !== INITIAL) {
for(var i = lastIndex; i >= 0 ; i--){
step = task.steps[i];
if (step.state === INITIAL || step.state === ROLLED) continue;
//iife to avoid async issues
(function(step){
chain = chain.then(function() {
return getRollbackFunc(step)(db, step, task);
});
})(step);
}
}
return chain.then(function() {
return task.remove();
});
}
|
Rollback for a single task
@param task the task to roll back
@returns {Promise|*}
|
rollBackTask
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function getRollbackFunc(step) {
switch(step.type) {
case SAVE: return rollbackSave;
case UPDATE:
case REMOVE: return rollbackRemoveOrUpdate;
case FILE_SAVE: return rollbackFileSave;
case FILE_REMOVE: return rollbackFileRemove;
}
}
|
Gets the correct rollback function for a step
@param step the step to rollback
@returns a function to rollback step
|
getRollbackFunc
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function rollbackSave(db, save, task) {
var collection = getCollectionForStep(db, save);
var _id = save.dataStore[0]._id;
return collection.deleteOne({_id: _id}).then(function(){
return updateState(task, save.index, ROLLED);
});
}
|
Rollback for a save step
@param db native db
@param save the save step
@param task the task containing the step
@returns {Promise|*}
|
rollbackSave
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function rollbackRemoveOrUpdate(db, step, task) {
var collection = getCollectionForStep(db, step);
var chain = Promise.resolve();
step.dataStore.forEach(function(data) {
chain = chain.then(function() {
var condition = {_id: data._id};
return collection.findOne(condition).then(function (doc) {
if (doc && step.type === UPDATE) {
return collection.updateOne(condition, data);
}
else if (!doc && step.type === REMOVE) {
return collection.insertOne(data);
}
return Promise.resolve();
});
});
});
return chain.then(function(){
return updateState(task, step.index, ROLLED);
});
}
|
Rollback for remove or update step.
@param db native db
@param step the update or remove step
@param task the task containing the step.
@returns {Promise|*}
|
rollbackRemoveOrUpdate
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function rollbackFileSave(db, step, task) {
var gfs = new Grid(db);
return dbUtils.removeFile(step.dataStore[0]._id, gfs)
.then(function () {
return updateState(task, step.index, ROLLED);
});
}
|
Rollback for file save step.
@param db native db
@param step the file save step
@param task the task containing the step.
@returns {Promise|*}
|
rollbackFileSave
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function rollbackFileRemove(db, step, task) {
return new Promise(function (resolve, reject) {
var gfs = Grid(db);
var data = step.dataStore[0];
gfs.exist({_id: data.removed}, function (err, exists) {
if (err) return reject(err);
gfs.findOne({_id: data.shadow}, function (err, shadowFile) {
if (err) return reject(err);
if (!shadowFile) return resolve();
function done() {
dbUtils.removeFile(data.shadow, gfs).then(function () {
updateState(task, step.index, ROLLED)
.then(resolve)
.catch(reject);
});
}
if (exists) return done();
var writeStream = gfs.createWriteStream(shadowFile.metadata.oldFile);
writeStream.on("close", done);
writeStream.on("error", reject);
gfs.createReadStream({_id: data.shadow}).pipe(writeStream);
});
});
});
}
|
Rollback for file remove step.
@param db native db
@param step the file remove step
@param task the task containing the step.
@returns {Promise|*}
|
rollbackFileRemove
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
function done() {
dbUtils.removeFile(data.shadow, gfs).then(function () {
updateState(task, step.index, ROLLED)
.then(resolve)
.catch(reject);
});
}
|
Rollback for file remove step.
@param db native db
@param step the file remove step
@param task the task containing the step.
@returns {Promise|*}
|
done
|
javascript
|
e-oj/Fawn
|
lib/roller.js
|
https://github.com/e-oj/Fawn/blob/master/lib/roller.js
|
MIT
|
TaskProvider = function (_mongoose, _TaskMdl) {
mongoose = _mongoose;
Grid.mongo = mongoose.mongo;
TaskMdl = _TaskMdl;
Roller = require("./roller")(mongoose, _TaskMdl);
dbUtils = require("./utils/db.utils")(mongoose);
native.setDbUtils(dbUtils);
modelCache = dbUtils.modelCache;
setModel = dbUtils.setModel;
getModel = dbUtils.getModel;
goose.setDbUtils(dbUtils);
return Task;
}
|
Provider for the Task class. It initializes all the
required variables and returns the Task class. Used
internally.
@param _mongoose The mongoose instance to be used
@param _TaskMdl The mongoose model for tasks (where tasks are stored)
@returns {Task}
@constructor
|
TaskProvider
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
Task = function() {
var task = this;
var index = 0;
var steps = [];
/**
* Mainly used internally for tests.
*
* @returns {TaskMdl} the mongoose model for the tasks
*/
task.getTaskCollection = function() {
return TaskMdl;
};
/**
* @see dbUtils.initModel
*
* @param modelName The intended name of the model
* @param schema The schema associated with this model
* @returns {Task}
*/
task.initModel = function(modelName, schema) {
dbUtils.initModel(modelName, schema);
return task;
};
/**
* Adds an update step (updateObj) to the steps queue
* and increments the index.
*
* @param model the model or document to update
* @param condition the condition or data for this update
* @param data the data for this update
*
* @returns {Task}
*/
task.update = function(model, condition, data) {
if (!data) {
if (!validDoc(model)) throw new Error("Invalid doc");
data = condition;
condition = {_id: model._id};
model = model.constructor;
}
if (!validModel(model)) throw new Error("Invalid model");
if (!isObject(condition)) throw new Error("Invalid Condition");
if (!isObject(data)) throw new Error("Invalid data");
var updateObj = {
index: index
, type: UPDATE
, state: INITIAL
, name: getModelName(model)
, condition: xcode(condition)
, data: xcode(data)
};
steps.push(updateObj);
index++;
return task;
};
/**
* Adds a save step (saveObj) to the steps queue
* and increments the index.
*
* @param model the model we're saving to or document to save
* @param doc the object to be saved
*
* @returns {Task}
*/
task.save = function(model, doc) {
if (!doc) {
if (!validDoc(model)) throw new Error("Invalid doc");
doc = model.toObject();
model = model.constructor;
}
else if (validDoc(doc)) doc = doc.toObject();
if (!validModel(model)) throw new Error("Invalid Model");
if (!isObject(doc)) throw new Error("Invalid doc");
var saveObj = {
index: index
, type: SAVE
, state: INITIAL
, name: getModelName(model)
, data: xcode(doc)
};
steps.push(saveObj);
index++;
return task;
};
/**
* Adds a remove step (removeObj) to the steps queue
* and increments the index.
*
* @param model the model we're removing from or document to remove
* @param condition the condition for removal
*
* @returns {Task}
*/
task.remove = function(model, condition) {
if (!condition) {
if (!validDoc(model)) throw new Error("Invalid doc");
condition = {_id: model._id};
model = model.constructor;
}
if (!validModel(model)) throw new Error("Invalid Model");
if (!isObject(condition)) throw new Error("Invalid Condition");
var removeObj = {
index: index
, type: REMOVE
, state: INITIAL
, name: getModelName(model)
, condition: xcode(condition)
};
steps.push(removeObj);
index++;
return task;
};
/**
* Adds options to an update step.
*
* @param options the options to be added
*
* @returns {Task}
*/
task.options = function(options) {
if (!steps.length) throw new Error("Can't set options on non-existing task");
if (!isObject(options)) throw new Error("Invalid Options");
var obj = steps[steps.length - 1];
if (obj.type !== UPDATE) {
throw new Error("the " + obj.type + " function does not accept options");
}
obj.options = options;
return task;
};
/**
* Adds a saveFile step (saveFileObj) to the steps queue
* and increments the index.
*
* @param filePath path of the file to be saved
* @param options options for saving the file
*
* @returns {Task}
*/
task.saveFile = function (filePath, options) {
if (!filePath) throw new Error("File path is required and must be a string");
if (options && !isObject(options)) throw new Error("options must be an object");
var saveFileObj = {
type: FILE_SAVE
, index: index
, state: INITIAL
, data: {
file_path: filePath
}
, options: xcode(options)
};
steps.push(saveFileObj);
index++;
return task;
};
/**
* Adds a removeFile step (removeFileObj) to the steps queue
* and increments the index.
*
* @param options options for removing the file
*
* @returns {Task}
*/
task.removeFile = function (options) {
if (!isObject(options)) throw new Error("options required. Must be an object");
var removeFileObj = {
type: FILE_REMOVE
, index: index
, state: INITIAL
, options: xcode(options)
};
steps.push(removeFileObj);
index++;
return task;
};
/**
* Runs a task. This function saves the steps to
* the db and proceeds to complete each step. If
* any of the steps fail, all previously completed
* steps get rolled back and the causal error is
* returned through a promise
*
* @options options to run with
*
* @returns a promise
*/
task.run = function(options){
var chain = Promise.resolve();
var dbTask = new TaskMdl({steps: steps});
steps = [];
index = 0;
var results = [];
return dbTask.save().then(function (_task) {
_task.steps.forEach(function (step) {
chain = chain.then(function () {
if(options) step.useMongoose = options.useMongoose;
return getResolveFunc(step)(step, _task, results);
});
});
return chain.then(function () {
var gfs = Grid(mongoose.connection.db);
var removeChain = dbUtils.makeRemoveChain(_task, gfs);
return removeChain.then(function () {
return _task.constructor.collection.deleteOne({_id: _task._id})
.then(function () {
return Promise.resolve(results);
});
});
}).catch(function (err) {
return Roller.rollOne(_task).then(function () {
throw err;
});
});
});
};
}
|
The task class. It contains all the functions associated
with a task. Enables edits to be queued as a series of
steps and run, in the order they were queued, in a
way that allows the edits to be rolled back in the event
of a failure.
@constructor
|
Task
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
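For orientation, here is a minimal usage sketch of the task API defined above, in the style of the Fawn README; the connection string, collection name and documents are illustrative assumptions.

var Fawn = require("fawn");
Fawn.init("mongodb://127.0.0.1:27017/fawn_demo");

var task = Fawn.Task();

task.save("accounts", {name: "alice", balance: 100})          // queued save step
  .update("accounts", {name: "bob"}, {$inc: {balance: -20}})  // queued update step
  .options({upsert: false})                                   // options attach to the update above
  .run({useMongoose: true})                                   // steps run in order; a failure rolls everything back
  .then(function (results) {
    console.log("steps completed:", results.length);
  })
  .catch(function (err) {
    console.error("task rolled back:", err);
  });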
function getResolveFunc(step) {
switch(step.type){
case UPDATE: return performUpdate;
case SAVE: return performSave;
case REMOVE: return performRemove;
case FILE_SAVE: return performFileSave;
case FILE_REMOVE: return performFileRemove;
}
}
|
The appropriate function to resolve a
step
@param step the step to resolve
@returns a function to handle the step
|
getResolveFunc
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function performUpdate(step, task, results) {
var db = mongoose.connection.db;
return step.useMongoose
? goose.performUpdate(db, step, task, results)
: native.nativeUpdate(db, step, task, results)
}
|
This function handles the update step.
@param step the update step
@param task the task which step belongs to
@param results array of results from previous operations
@returns {Promise|*}
|
performUpdate
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function performSave(step, task, results) {
var db = mongoose.connection.db;
return step.useMongoose
? goose.performSave(step, task, results)
: native.nativeSave(db, step, task, results)
}
|
This function handles the save step.
@param step the save step
@param task the task which step belongs to
@param results array of results from previous operations
@returns {Promise|*}
|
performSave
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function performRemove(step, task, results) {
var db = mongoose.connection.db;
return step.useMongoose
? goose.performRemove(db, step, task, results)
: native.nativeRemove(db, step, task, results)
}
|
This function handles the remove step.
@param step the remove step
@param task the task which step belongs to
@param results array of results from previous operations
@returns {Promise|*}
|
performRemove
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function performFileSave(step, task, results) {
var options = step.options ? xcode(step.options, true) : {};
resolveFuture(options, results);
options._id = options._id || dbUtils.generateId();
step.options = options;
step.dataStore = [{_id: options._id}];
return updateState(task, step.index, PENDING).then(function () {
return new Promise(function (resolve, reject) {
var conn = mongoose.connection;
var gfs = Grid(conn.db);
var writeStream = gfs.createWriteStream(options);
writeStream.on("close", function (file) {
results.push(file);
resolve(results);
});
writeStream.on("error", reject);
fs.createReadStream(step.data.file_path).pipe(writeStream);
}).then(function (results) {
return updateState(task, step.index, DONE, results);
});
});
}
|
This function handles the file save step.
@param step the file save step
@param task the task which step belongs to
@param results array of results from previous operations
@returns {Promise|*}
|
performFileSave
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
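The save step above boils down to piping a read stream into a GridFS write stream and waiting for its "close" event. A standalone sketch of that pattern, assuming an open mongoose connection and the gridfs-stream package; the file path and options are illustrative.

var fs = require("fs");
var mongoose = require("mongoose");
var Grid = require("gridfs-stream");

Grid.mongo = mongoose.mongo; // gridfs-stream needs a reference to the mongo driver

function saveFileToGridFs(filePath, options) {
  return new Promise(function (resolve, reject) {
    var gfs = Grid(mongoose.connection.db);
    var writeStream = gfs.createWriteStream(options || {});
    writeStream.on("close", resolve); // resolves with the stored file document
    writeStream.on("error", reject);
    fs.createReadStream(filePath).pipe(writeStream);
  });
}

// saveFileToGridFs("./photo.png", {filename: "photo.png"}).then(console.log);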
function performFileRemove(step, task, results) {
var options = xcode(step.options, true);
resolveFuture(options, results);
return storeOldFile(task, step).then(function (file) {
return updateState(task, step.index, PENDING).then(function () {
var chain = Promise.resolve();
if (!file) {
results.push(null);
}
else{
var gfs = Grid(mongoose.connection.db);
chain = dbUtils.removeFile(options, gfs).then(function (result) {
results.push(result);
});
}
return chain.then(function () {
return updateState(task, step.index, DONE, results);
});
});
});
}
|
This function handles the file remove step.
@param step the file remove step
@param task the task which step belongs to
@param results array of results from previous operations
@returns {Promise|*}
|
performFileRemove
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function storeOldFile(task, step) {
return new Promise(function (resolve, reject) {
var gfs = Grid(mongoose.connection.db);
var options = xcode(step.options, true);
gfs.findOne(options, function (err, file) {
if (err) return reject(err);
if (!file) return resolve(false);
var collection = task.constructor.collection;
step.dataStore = [{removed: file._id, shadow: dbUtils.generateId()}];
collection.updateOne({_id: task._id}, task.toObject()).then(function(){
var writeStream = gfs.createWriteStream({
_id: step.dataStore[0].shadow,
metadata: {oldFile: file}
});
writeStream.on("close", resolve);
writeStream.on("error", reject);
gfs.createReadStream({_id: file._id}).pipe(writeStream);
});
});
});
}
|
This function stores a file that's about to be
removed by a step, for rollback purposes
@param task the task
@param step the step
@returns {Promise|*}
|
storeOldFile
|
javascript
|
e-oj/Fawn
|
lib/task.js
|
https://github.com/e-oj/Fawn/blob/master/lib/task.js
|
MIT
|
function getCollection(name, schema){
if (schema) return mongoose.model(name, schema);
try {
return mongoose.model(name);
} catch (err){
initModel(name, schema);
return mongoose.model(name);
}
}
|
gets a collection as a mongoose model.
@param name name of the collection
@param schema schema for the model
|
getCollection
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function setModel(name, schema) {
modelCache[name] = getCollection(name, schema);
}
|
Adds a model to the model cache
@param name name of model
@param schema schema for the model
|
setModel
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function getModel(name, schema) {
if (!modelCache[name]) {
setModel(name, schema);
}
return modelCache[name];
}
|
Gets a mongoose model. Creates one if it
doesn't exist already.
@param name name of the model to retrieve
@param schema schema for the model
@returns a mongoose model
|
getModel
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function initModel(modelName, schema) {
if (modelCache[modelName]) throw new Error("The schema for this model has already been set");
if (schema && typeof schema !== "object") throw new Error("Invalid Schema");
var DEFAULT_SCHEMA = new Schema({}, {strict: false});
setModel(modelName, schema ? new Schema(schema, {strict: true}) : DEFAULT_SCHEMA);
}
|
Initializes a mongoose model with name: modelName.
If a schema is provided, it will be used to construct the model
else, the model will be initialized with a default, unrestricted
schema.
@param modelName The intended name of the model
@param schema The schema associated with this model
|
initModel
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
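A short usage sketch of the model-cache helpers above; the model names and schema are illustrative assumptions.

// Register a model with a strict schema once...
initModel("users", {name: String, email: String});

// ...then fetch it anywhere; repeat calls are served from modelCache.
var Users = getModel("users");

// Models never registered explicitly fall back to the default
// unrestricted ({strict: false}) schema.
var Logs = getModel("logs");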
function dropCollection(collection) {
return new Promise(function(resolve, reject) {
mongoose.connection.db.dropCollection(collection, function(err) {
if(err) reject(err);
else resolve();
});
});
}
|
Drops a MongoDB collection. For testing.
@param collection the name of the collection to be dropped
|
dropCollection
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function removeFile(id, gfs) {
var options = {_id: id};
if (id.constructor && id.constructor === Object) {
options = id;
}
return new Promise(function (resolve, reject) {
gfs.remove(options, function (err, result) {
if (err) return reject(err);
resolve(result);
});
});
}
|
Removes a file from the db
@param id file id or options object
@param gfs GridFS
|
removeFile
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function fileExists(id, gfs) {
return new Promise(function (resolve, reject) {
gfs.exist({_id: id}, function (err, exists) {
if (err) return reject(err);
resolve(exists);
});
});
}
|
Checks if a file exists in the db with the
specified id
@param id MongoDB ObjectId
@param gfs GridFS
|
fileExists
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function makeRemoveChain(task, gfs) {
var removeChain = Promise.resolve();
task.steps.forEach(function (step) {
if (step.type !== constants.FILE_REMOVE) return;
var shadowId = step.dataStore[0].shadow;
removeChain = removeChain.then(function () {
return fileExists(shadowId, gfs)
.then(function (exists) {
if (!exists) return Promise.resolve();
return removeFile(shadowId, gfs);
});
});
});
return removeChain;
}
|
Chains together the deletion of shadow files
from file remove steps.
@param task the task to check for shadow files
@param gfs GridFS
@returns {Promise|*}
|
makeRemoveChain
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
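makeRemoveChain folds an array into a single sequential promise chain; the same idiom is reusable on its own. A generic sketch, not Fawn API:

// Runs asyncWork(item) for each item strictly in order, the same way
// makeRemoveChain sequences shadow-file deletions.
function runSequentially(items, asyncWork) {
  var chain = Promise.resolve();
  items.forEach(function (item) {
    chain = chain.then(function () {
      return asyncWork(item);
    });
  });
  return chain;
}

// runSequentially(["a", "b", "c"], function (id) {
//   return Promise.resolve(console.log("removed", id));
// });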
function getCollectionForStep(db, step){
return step.useMongoose
? mongoose.model(step.name).collection
: db.collection(step.name)
}
|
Gets the collection for a step
@param db native db
@param step the step in question
@returns native collection
|
getCollectionForStep
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function storeOldData(db, step, condition){
var Collection = getCollectionForStep(db, step);
var options = step.options
? utils.xcode(step.options)
: step.type === constants.REMOVE ? {multi: true} : null;
var query = Collection.find(condition);
var searchQuery = options && options.multi === true
? query
: query.limit(1);
return searchQuery.toArray().then(function(result){
step.dataStore = result;
});
}
|
This function stores data that's about to be
changed by a step, for rollback purposes
@param db native db
@param step the step
@param condition literal obj rep of the step's condition
@returns {Promise|*}
|
storeOldData
|
javascript
|
e-oj/Fawn
|
lib/utils/db.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/db.utils.js
|
MIT
|
function encode(obj, key){
var newKey = null;
if(key[0] === "$"){
newKey = $ + key;
}
if(key.includes(".")){
newKey = (newKey || key).split(".").join(DOT);
}
if(newKey){
obj[newKey] = obj[key];
delete obj[key]
}
}
|
encodes object keys starting with "$"
or containing "."
@param obj object containing suspect key
@param key key to encode
|
encode
|
javascript
|
e-oj/Fawn
|
lib/utils/gen.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/gen.utils.js
|
MIT
|
function decode(obj, key){
var newKey = key.split($).join("").split(DOT).join(".");
if(key !== newKey){
obj[newKey] = obj[key];
delete obj[key];
}
}
|
Decodes an object's encoded keys
@param obj obj with possibly encoded key
@param key key to decode
|
decode
|
javascript
|
e-oj/Fawn
|
lib/utils/gen.utils.js
|
https://github.com/e-oj/Fawn/blob/master/lib/utils/gen.utils.js
|
MIT
|
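A round-trip sketch of the two helpers above. The placeholder token values are assumptions for illustration; the real $ and DOT constants are defined elsewhere in Fawn.

// Assume, for illustration, that $ -> "*_$*" and DOT -> "*_._*".
var update = {$inc: {visits: 1}, "meta.tags": ["a"]};

// encode() renames keys MongoDB cannot store inside a persisted task step:
Object.keys(update).forEach(function (key) { encode(update, key); });
// -> { "*_$*$inc": {visits: 1}, "meta*_._*tags": ["a"] }

// decode() restores the original keys before the step is executed:
Object.keys(update).forEach(function (key) { decode(update, key); });
// -> { "$inc": {visits: 1}, "meta.tags": ["a"] }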
constructor () {
/**
* Stores prop->value mappings.
* @type {Map<string|symbol, unknown>}
*/
this._properties = new Map()
/**
* Stores prop->Promise mappings.
* @type {Map<string|symbol, pDefer.DeferredPromise<unknown>>}
*/
this._promiseMap = new Map()
}
|
Context helps the app do many different things without explicitly depending on each other. Instead, each module
can set a property on the context and other modules can get that property from the context when they need it.
Benefits:
* Avoid passing the same object to many different modules.
* Avoid circular dependencies and makes it easier to test modules in isolation.
* Speed up startup time by only loading what we need when we need it.
| Context property exists? | Is the backing promise fulfilled? | Method called | Is a deferred promise created? | Returned Value |
|--------------------------|-----------------------------------|---------------|--------------------------------|----------------------------------------------------------------------------------------------------------|
| No | N/A | GetProp | Yes | A newly created deferred promise(unfulfilled) |
| No | N/A | SetProp | Yes | void |
| Yes | No | GetProp | No | The found deferred promise (unfulfilled) |
| Yes | No | SetProp | No | void |
| Yes | Yes | GetProp | No | The found deferred promise (fulfilled) |
| Yes | Yes | SetProp | No | We throw an error here. Any getProps called for the property prior to this would have a hanging promise. |
@extends {Record<string, unknown>}
@property {function} launchWebUI
|
constructor
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
setProp (propertyName, value) {
if (this._properties.has(propertyName)) {
logger.error('[ctx] Property already exists')
throw new Error(`[ctx] Property ${String(propertyName)} already exists`)
}
logger.info(`[ctx] setting ${String(propertyName)}`)
try {
this._properties.set(propertyName, value)
this._resolvePropToValue(propertyName, value)
} catch (e) {
logger.error(e)
}
}
|
Set the value of a property to a value.
This method supports overwriting values.
@template T
@param {ContextProperties} propertyName
@param {T} value
@returns {void}
|
setProp
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
async getProp (propertyName) {
logger.info(`[ctx] getting ${String(propertyName)}`)
const value = this._properties.get(propertyName)
if (value != null) {
logger.info(`[ctx] Found existing property ${String(propertyName)}`)
this._resolvePropToValue(propertyName, value)
// @ts-ignore
return value
} else {
logger.info(`[ctx] Could not find property ${String(propertyName)}`)
}
// no value exists, create deferred promise and return the promise
return this._createDeferredForProp(propertyName).promise
}
|
Get the value of a property wrapped in a promise.
@template T
@param {ContextProperties} propertyName
@returns {Promise<T>}
|
getProp
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
getFn (propertyName) {
const originalFnPromise = this.getProp(propertyName)
return async (...args) => {
const originalFn = await originalFnPromise
try {
return await originalFn(...args)
} catch (err) {
logger.error(`[ctx] Error calling ${String(propertyName)}`)
logger.error(err)
throw err
}
}
}
|
A simple helper to improve DX and UX when calling functions.
This function allows you to request a function from AppContext without blocking until you actually need to call it.
@param {ContextProperties} propertyName
@returns {(...args: unknown[]) => Promise<unknown>}
|
getFn
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
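A minimal usage sketch of the context above, assuming a ctx instance of this class; launchWebUI comes from the documented context properties, and the route string is illustrative.

// Consumer: may ask for the function before it exists; the call blocks on
// the deferred promise until a producer sets the property.
async function consumer (ctx) {
  const launchWebUI = ctx.getFn('launchWebUI') // wrapper returned immediately
  await launchWebUI('/files')                  // resolves once the real fn is set
}

// Producer: setting the property resolves any pending getProp()/getFn() calls.
function producer (ctx) {
  ctx.setProp('launchWebUI', async (route) => {
    console.log('opening web ui at', route)
  })
}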
_resolvePropToValue (propertyName, value) {
let deferred = this._promiseMap.get(propertyName)
if (deferred == null) {
logger.info(`[ctx] No promise found for ${String(propertyName)}`)
deferred = this._createDeferredForProp(propertyName)
}
logger.info(`[ctx] Resolving promise for ${String(propertyName)}`)
deferred.resolve(value)
}
|
Gets existing promise and resolves it with the given value.
@private
@template T
@param {ContextProperties} propertyName
@param {T} value
@returns {void}
|
_resolvePropToValue
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
_createDeferredForProp (propertyName) {
let deferred = this._promiseMap.get(propertyName)
if (deferred == null) {
deferred = pDefer()
this._promiseMap.set(propertyName, deferred)
}
// @ts-expect-error - Need to fix generics
return deferred
}
|
Returns the existing promise for a property if it exists.
If not, one is created and set in the `_promiseMap`, then returned
@private
@template T
@param {ContextProperties} propertyName
@returns {pDefer.DeferredPromise<T>}
|
_createDeferredForProp
|
javascript
|
ipfs/ipfs-desktop
|
src/context.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/context.js
|
MIT
|
function icon (status) {
const dir = path.resolve(path.join(__dirname, '../assets/icons/tray'))
if (IS_MAC) {
return path.join(dir, 'macos', `${status}-22Template.png`)
}
const bw = store.get(CONFIG_KEYS.MONOCHROME_TRAY_ICON, false)
if (bw) {
const theme = nativeTheme.shouldUseDarkColors ? 'dark' : 'light'
return path.join(dir, 'others', `${status}-32-${theme}.png`)
} else {
return path.join(dir, 'others', `${status}-large.png`)
}
}
|
Returns the tray icon path for the given status: the macOS template icon on macOS,
otherwise a monochrome theme-aware icon or the default colored icon, depending on
the MONOCHROME_TRAY_ICON setting.
@param {string} status
@returns {string}
|
icon
|
javascript
|
ipfs/ipfs-desktop
|
src/tray.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/tray.js
|
MIT
|
function getKuboRepositoryPath () {
let ipfsPath = store.get('ipfsConfig.path')
if (!ipfsPath) {
ipfsPath = process.env.IPFS_PATH
if (!ipfsPath) {
const homeDir = os.homedir()
ipfsPath = path.join(homeDir, '.ipfs')
}
}
return ipfsPath
}
|
Resolves the Kubo repository path: the stored ipfsConfig.path, falling back to the
IPFS_PATH environment variable, and finally ~/.ipfs.
@returns {string}
|
getKuboRepositoryPath
|
javascript
|
ipfs/ipfs-desktop
|
src/tray.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/tray.js
|
MIT
|
function getConfigFilePath (ipfsd) {
return join(ipfsd.path, 'config')
}
|
Get repository configuration file path.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {string} config file path
|
getConfigFilePath
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function getApiFilePath (ipfsd) {
return join(ipfsd.path, 'api')
}
|
Get repository api file path.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {string} api file path
|
getApiFilePath
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function configExists (ipfsd) {
return fs.pathExistsSync(getConfigFilePath(ipfsd))
}
|
Checks if the repository configuration file exists.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {boolean} true if config file exists
|
configExists
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function apiFileExists (ipfsd) {
return fs.pathExistsSync(getApiFilePath(ipfsd))
}
|
Checks if the repository api file exists.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {boolean} true if api file exists
|
apiFileExists
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function readConfigFile (ipfsd) {
return fs.readJsonSync(getConfigFilePath(ipfsd))
}
|
Reads the repository configuration file.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {any} the configuration
|
readConfigFile
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function writeConfigFile (ipfsd, config) {
fs.writeJsonSync(getConfigFilePath(ipfsd), config, { spaces: 2 })
}
|
Writes the repository configuration file.
@param {import('ipfsd-ctl').Controller} ipfsd
@param {Object<string, any>} config
|
writeConfigFile
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function applyDefaults (ipfsd) {
const config = readConfigFile(ipfsd)
// Ensure strict CORS checking
// See: https://github.com/ipfs/js-ipfsd-ctl/issues/333
config.API = { HTTPHeaders: {} }
config.Swarm = config.Swarm ?? {}
config.Swarm.DisableNatPortMap = false // uPnP
config.Swarm.ConnMgr = config.Swarm.ConnMgr ?? {}
config.Discovery = config.Discovery ?? {}
config.Discovery.MDNS = config.Discovery.MDNS ?? {}
config.Discovery.MDNS.Enabled = true
config.AutoTLS = config.AutoTLS ?? {}
config.AutoTLS.Enabled = true
writeConfigFile(ipfsd, config)
}
|
Set default minimum and maximum of connections to maintain
by default. This must only be called for repositories created
by IPFS Desktop. Existing ones shall remain intact.
@param {import('ipfsd-ctl').Controller} ipfsd
|
applyDefaults
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function parseMultiaddr (addr) {
return addr.includes('/http')
? multiaddr(addr)
: multiaddr(addr).encapsulate('/http')
}
|
Parses multiaddr from the configuration.
@param {string} addr
@returns {import('multiaddr').Multiaddr}
|
parseMultiaddr
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function getHttpPort (addrs) {
let httpUrl = null
if (Array.isArray(addrs)) {
httpUrl = addrs.find(v => v.includes('127.0.0.1'))
} else {
httpUrl = addrs
}
const gw = parseMultiaddr(httpUrl)
return gw.nodeAddress().port
}
|
Get local HTTP port.
@param {array|string} addrs
@returns {number} the port
|
getHttpPort
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
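A worked example of the two helpers above; the addresses are illustrative and the nodeAddress() shape matches the one documented for checkIfAddrIsDaemon further down.

// '/ip4/127.0.0.1/tcp/5001' has no /http component, so parseMultiaddr() encapsulates one.
const ma = parseMultiaddr('/ip4/127.0.0.1/tcp/5001')
console.log(ma.toString())    // '/ip4/127.0.0.1/tcp/5001/http'
console.log(ma.nodeAddress()) // { family: 4, address: '127.0.0.1', port: 5001 }

// getHttpPort() accepts a single addr or an array and prefers the 127.0.0.1 entry.
console.log(getHttpPort(['/ip4/0.0.0.0/tcp/8080', '/ip4/127.0.0.1/tcp/8080'])) // 8080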
function migrateConfig (ipfsd) {
// Bump revision number when new migration rule is added
const REVISION = 6
const REVISION_KEY = 'daemonConfigRevision'
const CURRENT_REVISION = store.get(REVISION_KEY, 0)
// Migration is applied only once per revision
if (CURRENT_REVISION >= REVISION) return
// Read config
let config = null
let changed = false
try {
config = readConfigFile(ipfsd)
} catch (err) {
    // This is a best-effort check; don't blow up here, that should happen elsewhere.
logger.error(`[daemon] migrateConfig: error reading config file: ${err.message || err}`)
return
}
if (CURRENT_REVISION < 1) {
// Cleanup https://github.com/ipfs-shipyard/ipfs-desktop/issues/1631
if (config.Discovery && config.Discovery.MDNS && config.Discovery.MDNS.enabled) {
config.Discovery.MDNS.Enabled = config.Discovery.MDNS.Enabled || true
delete config.Discovery.MDNS.enabled
changed = true
}
}
if (CURRENT_REVISION < 3) {
const api = config.API || {}
const httpHeaders = api.HTTPHeaders || {}
const accessControlAllowOrigin = httpHeaders['Access-Control-Allow-Origin'] || []
const addURL = url => {
if (!accessControlAllowOrigin.includes(url)) {
accessControlAllowOrigin.push(url)
return true
}
return false
}
const addedWebUI = addURL('https://webui.ipfs.io')
const addedGw = addURL(`http://webui.ipfs.io.ipns.localhost:${getGatewayPort(config)}`)
if (addedWebUI || addedGw) {
httpHeaders['Access-Control-Allow-Origin'] = accessControlAllowOrigin
api.HTTPHeaders = httpHeaders
config.API = api
changed = true
}
}
if (CURRENT_REVISION < 4) {
if (config.Swarm && config.Swarm.ConnMgr) {
// lower ConnMgr https://github.com/ipfs/ipfs-desktop/issues/2039
const { GracePeriod, LowWater, HighWater } = config.Swarm.ConnMgr
if (GracePeriod === '300s') {
config.Swarm.ConnMgr.GracePeriod = '1m'
changed = true
}
if (LowWater > 20) {
config.Swarm.ConnMgr.LowWater = 20
changed = true
}
if (HighWater > 40) {
config.Swarm.ConnMgr.HighWater = 40
changed = true
}
}
}
if (CURRENT_REVISION < 5) {
if (config.Swarm && config.Swarm.ConnMgr) {
const { GracePeriod, LowWater, HighWater } = config.Swarm.ConnMgr
// Only touch config if user runs old defaults hardcoded in ipfs-desktop
if (GracePeriod === '1m' && LowWater === 20 && HighWater === 40) {
config.Swarm.ConnMgr = {} // remove overrides, use defaults from Kubo https://github.com/ipfs/kubo/pull/9483
changed = true
}
}
}
if (CURRENT_REVISION < 6) {
// Enable AutoTLS if there is no explicit user preference
if (config.AutoTLS === undefined) {
config.AutoTLS = {}
changed = true
}
if (config.AutoTLS.Enabled === undefined) {
config.AutoTLS.Enabled = true
changed = true
}
}
if (changed) {
try {
writeConfigFile(ipfsd, config)
store.safeSet(REVISION_KEY, REVISION)
} catch (err) {
logger.error(`[daemon] migrateConfig: error writing config file: ${err.message || err}`)
return
}
}
store.safeSet(REVISION_KEY, REVISION)
}
|
Apply one-time updates to the config of IPFS node. This is the place
where we execute fixes and performance tweaks for existing users.
@param {import('ipfsd-ctl').Controller} ipfsd
|
migrateConfig
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
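The shape of migrateConfig generalizes to any revision-gated, run-once migration. A stripped-down sketch of that pattern; the store interface and rule list are hypothetical, not ipfs-desktop API.

// Each rule upgrades *to* a given revision and is applied at most once per profile.
const REVISION_KEY = 'daemonConfigRevision'

function runMigrations (store, config, rules, targetRevision) {
  const current = store.get(REVISION_KEY, 0)
  if (current >= targetRevision) return false // already migrated

  let changed = false
  for (const rule of rules) {
    if (current < rule.revision) {
      changed = rule.apply(config) || changed // rule.apply returns true if it touched config
    }
  }

  store.set(REVISION_KEY, targetRevision) // bump once, after all rules ran
  return changed
}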
addURL = url => {
if (!accessControlAllowOrigin.includes(url)) {
accessControlAllowOrigin.push(url)
return true
}
return false
}
|
Adds a URL to the Access-Control-Allow-Origin list if it is not already present.
@param {string} url
@returns {boolean} true if the list was changed
|
addURL
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
async function checkIfAddrIsDaemon (addr) {
const options = {
timeout: 3000, // 3s is plenty for localhost request
method: 'POST',
host: addr.address,
port: addr.port,
path: '/api/v0/refs?arg=/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
}
return new Promise(resolve => {
const req = http.request(options, function (r) {
resolve(r.statusCode === 200)
})
req.on('error', () => {
resolve(false)
})
req.end()
})
}
|
Checks if the given address is a daemon address.
@param {{ family: 4 | 6, address: string, port: number }} addr
@returns {Promise<boolean>}
|
checkIfAddrIsDaemon
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
findFreePort = async (port) => {
port = Math.max(port, 1024)
return portfinder.getPortPromise({ port })
}
|
Finds a free port close to the given port.
@param {number} port
@returns {Promise<number>}
|
findFreePort
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
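findFreePort is a thin wrapper over portfinder; a standalone sketch, assuming the portfinder package and an arbitrary starting port.

const portfinder = require('portfinder')

// Ask for 5001 or the next free port above it; ports below 1024 are bumped up,
// mirroring the Math.max(port, 1024) guard in findFreePort.
async function probe () {
  const port = await portfinder.getPortPromise({ port: Math.max(5001, 1024) })
  console.log('free port:', port)
}

probe().catch(console.error)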
async function checkPortsArray (ipfsd, addrs) {
addrs = addrs.filter(Boolean)
for (const addr of addrs) {
const ma = parseMultiaddr(addr)
const port = ma.nodeAddress().port
if (port === 0) {
continue
}
const isDaemon = await checkIfAddrIsDaemon(ma.nodeAddress())
if (isDaemon) {
continue
}
const freePort = await findFreePort(port)
if (port !== freePort) {
const openConfig = dialogs.multipleBusyPortsDialog()
if (openConfig) {
shell.openPath(getConfigFilePath(ipfsd))
}
return false
}
}
return true
}
|
Check if all the ports in the array are available.
@param {import('ipfsd-ctl').Controller} ipfsd
@param {string[]} addrs
@returns {Promise<boolean>}
|
checkPortsArray
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
async function checkPorts (ipfsd) {
const config = readConfigFile(ipfsd)
const apiIsArr = Array.isArray(config.Addresses.API)
const gatewayIsArr = Array.isArray(config.Addresses.Gateway)
if (apiIsArr || gatewayIsArr) {
logger.info('[daemon] custom configuration with array of API or Gateway addrs')
return checkPortsArray(ipfsd, [].concat(config.Addresses.API, config.Addresses.Gateway))
}
const configApiMa = parseMultiaddr(config.Addresses.API)
const configGatewayMa = parseMultiaddr(config.Addresses.Gateway)
const isApiMaDaemon = await checkIfAddrIsDaemon(configApiMa.nodeAddress())
const isGatewayMaDaemon = await checkIfAddrIsDaemon(configGatewayMa.nodeAddress())
if (isApiMaDaemon && isGatewayMaDaemon) {
logger.info('[daemon] ports busy by a daemon')
return true
}
const apiPort = configApiMa.nodeAddress().port
const gatewayPort = configGatewayMa.nodeAddress().port
const freeGatewayPort = await findFreePort(gatewayPort)
let freeApiPort = await findFreePort(apiPort)
// ensure the picked ports are different
while (freeApiPort === freeGatewayPort) {
freeApiPort = await findFreePort(freeApiPort + 1)
}
const busyApiPort = apiPort !== freeApiPort
const busyGatewayPort = gatewayPort !== freeGatewayPort
if (!busyApiPort && !busyGatewayPort) {
return true
}
// two "0" in config mean "pick free ports without any prompt"
let promptUser = (apiPort !== 0 || gatewayPort !== 0)
if (process.env.NODE_ENV === 'test' || process.env.CI != null) {
logger.info('[daemon] CI or TEST mode, skipping busyPortDialog')
promptUser = false
}
if (promptUser) {
let useAlternativePorts = null
if (busyApiPort && busyGatewayPort) {
logger.info('[daemon] api and gateway ports busy')
useAlternativePorts = dialogs.busyPortsDialog(apiPort, freeApiPort, gatewayPort, freeGatewayPort)
} else if (busyApiPort) {
logger.info('[daemon] api port busy')
useAlternativePorts = dialogs.busyPortDialog(apiPort, freeApiPort)
} else {
logger.info('[daemon] gateway port busy')
useAlternativePorts = dialogs.busyPortDialog(gatewayPort, freeGatewayPort)
}
if (!useAlternativePorts) {
return false
}
}
if (busyApiPort) {
config.Addresses.API = config.Addresses.API.replace(apiPort.toString(), freeApiPort.toString())
}
if (busyGatewayPort) {
config.Addresses.Gateway = config.Addresses.Gateway.replace(gatewayPort.toString(), freeGatewayPort.toString())
}
writeConfigFile(ipfsd, config)
logger.info('[daemon] ports updated')
return true
}
|
Check if ports are available and handle it. Returns
true if ports are cleared for IPFS to start.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {Promise<boolean>}
|
checkPorts
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function checkRepositoryAndConfiguration (ipfsd) {
if (!fs.pathExistsSync(ipfsd.path)) {
// If the repository doesn't exist, skip verification.
return true
}
try {
const stats = fs.statSync(ipfsd.path)
if (!stats.isDirectory()) {
logger.error(`${ipfsd.path} must be a directory`)
dialogs.repositoryMustBeDirectoryDialog(ipfsd.path)
return false
}
if (!apiFileExists(ipfsd)) {
if (!configExists(ipfsd)) {
// Config is generated automatically if it doesn't exist.
logger.error(`configuration does not exist at ${ipfsd.path}`)
dialogs.repositoryConfigurationIsMissingDialog(ipfsd.path)
return true
}
      // This should catch errors such as having no configuration file,
// IPFS_DIR not being a directory, or the configuration file
// being corrupted.
readConfigFile(ipfsd)
}
const swarmKeyPath = join(ipfsd.path, 'swarm.key')
if (fs.pathExistsSync(swarmKeyPath)) {
// IPFS Desktop does not support private network IPFS repositories.
dialogs.repositoryIsPrivateDialog(ipfsd.path)
return false
}
return true
} catch (e) {
// Save to error.log
logger.error(e)
dialogs.repositoryIsInvalidDialog(ipfsd.path)
return false
}
}
|
Checks if the repository and the configuration file are valid.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {boolean}
|
checkRepositoryAndConfiguration
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/config.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/config.js
|
MIT
|
function getIpfsBinPath () {
return process.env.IPFS_GO_EXEC ||
getCustomBinary() ||
require('kubo')
.path()
.replace('app.asar', 'app.asar.unpacked')
}
|
Get the IPFS binary file path.
@returns {string}
|
getIpfsBinPath
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|
async function getIpfsd (flags, path) {
const ipfsBin = getIpfsBinPath()
const ipfsd = await Ctl.createController({
ipfsHttpModule: require('ipfs-http-client'),
ipfsBin,
ipfsOptions: {
repo: path
},
remote: false,
disposable: false,
test: false,
args: flags
})
// Checks if the repository is valid to use with IPFS Desktop. If not,
// we quit the app. We assume that checkRepositoryAndConfiguration
// presents any dialog explaining the situation.
if (!checkRepositoryAndConfiguration(ipfsd)) {
return null
}
let isRemote = false
if (configExists(ipfsd)) {
migrateConfig(ipfsd)
} else {
// If config does not exist, but $IPFS_PATH/api exists
// then it is a remote repository.
isRemote = apiFileExists(ipfsd)
if (!isRemote) {
// It's a new repository!
await ipfsd.init()
applyDefaults(ipfsd)
}
}
if (!isRemote) {
// Check if ports are free and we're clear to start IPFS.
// If not, we return null.
if (!await checkPorts(ipfsd)) {
return null
}
}
return ipfsd
}
|
Gets the IPFS daemon controller. If null is returned,
it means that the repository or some configuration is wrong
and IPFS Desktop should quit.
@param {string[]} flags
@param {string} path
@returns {Promise<import('ipfsd-ctl').Controller|null>}
|
getIpfsd
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|
function listenToIpfsLogs (ipfsd, callback) {
let stdout, stderr
const listener = data => {
callback(data.toString())
}
const interval = setInterval(() => {
if (!ipfsd.subprocess) {
return
}
stdout = ipfsd.subprocess.stdout
stderr = ipfsd.subprocess.stderr
stdout.on('data', listener)
stderr.on('data', listener)
clearInterval(interval)
}, 20)
const stop = () => {
clearInterval(interval)
if (stdout) stdout.removeListener('data', listener)
if (stderr) stderr.removeListener('data', listener)
}
return stop
}
|
Listens to the stdout and stderr of the spawned IPFS daemon and forwards each chunk
of output to the callback as a string.
@param {import('ipfsd-ctl').Controller} ipfsd
@param {function(string): void} callback
@returns {function(): void} a function that stops listening
|
listenToIpfsLogs
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|
stop = () => {
clearInterval(interval)
if (stdout) stdout.removeListener('data', listener)
if (stderr) stderr.removeListener('data', listener)
}
|
Stops listening to the daemon's output: clears the polling interval and removes the
data listeners from stdout and stderr.
@returns {void}
|
stop
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|
async function startIpfsWithLogs (ipfsd) {
let err, id, migrationPrompt
let isMigrating, isErrored, isFinished
let logs = ''
const isSpawnedDaemonDead = (ipfsd) => {
if (typeof ipfsd.subprocess === 'undefined') throw new Error('undefined ipfsd.subprocess, unable to reason about startup errors')
if (ipfsd.subprocess === null) return false // not spawned yet or remote
if (ipfsd.subprocess?.failed) return true // explicit failure
// detect when spawned ipfsd process is gone/dead
// by inspecting its pid - it should be alive
const { pid } = ipfsd.subprocess
try {
// signal 0 throws if process is missing, noop otherwise
process.kill(pid, 0)
return false
} catch (e) {
return true
}
}
const stopListening = listenToIpfsLogs(ipfsd, data => {
logs += data.toString()
const line = data.toLowerCase()
isMigrating = isMigrating || line.includes('migration')
isErrored = isErrored || isSpawnedDaemonDead(ipfsd)
isFinished = isFinished || line.includes('daemon is ready')
if (!isMigrating && !isErrored) {
return
}
if (!migrationPrompt) {
logger.info('[daemon] ipfs data store is migrating')
migrationPrompt = showMigrationPrompt(logs, isErrored, isFinished)
return
}
if (isErrored || isFinished) {
// forced show on error or when finished,
// because user could close it to run in background
migrationPrompt.loadWindow(logs, isErrored, isFinished)
} else { // update progress if the window is still around
migrationPrompt.update(logs)
}
})
try {
await ipfsd.start()
const idRes = await ipfsd.api.id()
id = idRes.id
} catch (e) {
err = e
} finally {
// stop monitoring daemon output - we only care about startup phase
stopListening()
// Show startup error using the same UI as migrations.
// This is catch-all that will show stdout/stderr of ipfs daemon
// that failed to start, allowing user to self-diagnose or report issue.
isErrored = isErrored || isSpawnedDaemonDead(ipfsd)
if (isErrored) { // save daemon output to error.log
if (logs.trim().length === 0) {
logs = 'ipfs daemon failed to start and produced no output (see error.log for details)'
}
logger.error(logs)
if (migrationPrompt) {
migrationPrompt.loadWindow(logs, isErrored, isFinished)
} else {
showMigrationPrompt(logs, isErrored, isFinished)
}
}
}
return {
err, id, logs
}
}
|
Start IPFS, collects the logs, detects errors and migrations.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {Promise<IpfsLogs>}
|
startIpfsWithLogs
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|
isSpawnedDaemonDead = (ipfsd) => {
if (typeof ipfsd.subprocess === 'undefined') throw new Error('undefined ipfsd.subprocess, unable to reason about startup errors')
if (ipfsd.subprocess === null) return false // not spawned yet or remote
if (ipfsd.subprocess?.failed) return true // explicit failure
// detect when spawned ipfsd process is gone/dead
// by inspecting its pid - it should be alive
const { pid } = ipfsd.subprocess
try {
// signal 0 throws if process is missing, noop otherwise
process.kill(pid, 0)
return false
} catch (e) {
return true
}
}
|
Checks whether the spawned ipfs subprocess has died, using its failed flag and a
signal-0 probe of its pid.
@param {import('ipfsd-ctl').Controller} ipfsd
@returns {boolean}
|
isSpawnedDaemonDead
|
javascript
|
ipfs/ipfs-desktop
|
src/daemon/daemon.js
|
https://github.com/ipfs/ipfs-desktop/blob/master/src/daemon/daemon.js
|
MIT
|