This commit is contained in:
154
node_modules/vscode-languageserver/lib/common/semanticTokens.js
generated
vendored
Normal file
154
node_modules/vscode-languageserver/lib/common/semanticTokens.js
generated
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
"use strict";
|
||||
/* --------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
* ------------------------------------------------------------------------------------------ */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SemanticTokensBuilder = exports.SemanticTokensDiff = exports.SemanticTokensFeature = void 0;
|
||||
const vscode_languageserver_protocol_1 = require("vscode-languageserver-protocol");
|
||||
/**
 * Mixin adding semantic-token support to a language-server connection
 * proxy. The produced subclass exposes a `semanticTokens` getter whose
 * object offers `refresh`, `on`, `onDelta` and `onRange` entry points.
 */
const SemanticTokensFeature = (Base) => {
    return class extends Base {
        get semanticTokens() {
            // Common wiring for the three token requests: register the
            // handler and hand it work-done / partial-result reporters.
            const register = (type, handler) => {
                return this.connection.onRequest(type, (params, cancel) => {
                    return handler(params, cancel, this.attachWorkDoneProgress(params), this.attachPartialResultProgress(type, params));
                });
            };
            return {
                // Ask the client to re-query all semantic tokens.
                refresh: () => this.connection.sendRequest(vscode_languageserver_protocol_1.SemanticTokensRefreshRequest.type),
                // Full-document token request.
                on: (handler) => register(vscode_languageserver_protocol_1.SemanticTokensRequest.type, handler),
                // Delta request against a previously delivered result.
                onDelta: (handler) => register(vscode_languageserver_protocol_1.SemanticTokensDeltaRequest.type, handler),
                // Range-restricted token request.
                onRange: (handler) => register(vscode_languageserver_protocol_1.SemanticTokensRangeRequest.type, handler),
            };
        }
    };
};
|
||||
// Public export of the mixin factory.
exports.SemanticTokensFeature = SemanticTokensFeature;
|
||||
/**
 * Computes the difference between two semantic-token integer sequences as
 * LSP `SemanticTokensEdit`-shaped objects ({ start, deleteCount, data? }).
 * At most one edit is ever produced.
 */
class SemanticTokensDiff {
    /**
     * @param originalSequence token data of the previously published result.
     * @param modifiedSequence freshly computed token data.
     */
    constructor(originalSequence, modifiedSequence) {
        this.originalSequence = originalSequence;
        this.modifiedSequence = modifiedSequence;
    }
    /**
     * Returns the edits that turn the original sequence into the modified
     * one; an empty array means the sequences are identical.
     */
    computeDiff() {
        const original = this.originalSequence;
        const modified = this.modifiedSequence;
        // Skip the common prefix.
        let start = 0;
        while (start < modified.length && start < original.length && original[start] === modified[start]) {
            start += 1;
        }
        if (start >= modified.length && start >= original.length) {
            // Both sequences fully consumed: they are identical.
            return [];
        }
        if (start >= original.length) {
            // Only new data remains: pure append at the end.
            return [{ start, deleteCount: 0, data: modified.slice(start) }];
        }
        if (start >= modified.length) {
            // Only original data remains: pure truncation at the end.
            return [{ start, deleteCount: original.length - start }];
        }
        // Shrink the common suffix from the right.
        let originalEnd = original.length - 1;
        let modifiedEnd = modified.length - 1;
        while (originalEnd >= start && modifiedEnd >= start && original[originalEnd] === modified[modifiedEnd]) {
            originalEnd -= 1;
            modifiedEnd -= 1;
        }
        // If an index walked past the prefix boundary, step both forward
        // again so the edit window stays non-degenerate.
        if (originalEnd < start || modifiedEnd < start) {
            originalEnd += 1;
            modifiedEnd += 1;
        }
        const deleteCount = originalEnd - start + 1;
        const data = modified.slice(start, modifiedEnd + 1);
        // Stepping forward can hide a plain deletion: a single kept value
        // equal to the original at the window end inserts nothing new.
        if (data.length === 1 && data[0] === original[originalEnd]) {
            return [{ start, deleteCount: deleteCount - 1 }];
        }
        return [{ start, deleteCount, data }];
    }
}
|
||||
// Public export of the diff helper.
exports.SemanticTokensDiff = SemanticTokensDiff;
|
||||
class SemanticTokensBuilder {
|
||||
constructor() {
|
||||
this._prevData = undefined;
|
||||
this.initialize();
|
||||
}
|
||||
initialize() {
|
||||
this._id = Date.now();
|
||||
this._prevLine = 0;
|
||||
this._prevChar = 0;
|
||||
this._data = [];
|
||||
this._dataLen = 0;
|
||||
}
|
||||
push(line, char, length, tokenType, tokenModifiers) {
|
||||
let pushLine = line;
|
||||
let pushChar = char;
|
||||
if (this._dataLen > 0) {
|
||||
pushLine -= this._prevLine;
|
||||
if (pushLine === 0) {
|
||||
pushChar -= this._prevChar;
|
||||
}
|
||||
}
|
||||
this._data[this._dataLen++] = pushLine;
|
||||
this._data[this._dataLen++] = pushChar;
|
||||
this._data[this._dataLen++] = length;
|
||||
this._data[this._dataLen++] = tokenType;
|
||||
this._data[this._dataLen++] = tokenModifiers;
|
||||
this._prevLine = line;
|
||||
this._prevChar = char;
|
||||
}
|
||||
get id() {
|
||||
return this._id.toString();
|
||||
}
|
||||
previousResult(id) {
|
||||
if (this.id === id) {
|
||||
this._prevData = this._data;
|
||||
}
|
||||
this.initialize();
|
||||
}
|
||||
build() {
|
||||
this._prevData = undefined;
|
||||
return {
|
||||
resultId: this.id,
|
||||
data: this._data
|
||||
};
|
||||
}
|
||||
canBuildEdits() {
|
||||
return this._prevData !== undefined;
|
||||
}
|
||||
buildEdits() {
|
||||
if (this._prevData !== undefined) {
|
||||
return {
|
||||
resultId: this.id,
|
||||
edits: (new SemanticTokensDiff(this._prevData, this._data)).computeDiff()
|
||||
};
|
||||
}
|
||||
else {
|
||||
return this.build();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Public export of the token builder.
exports.SemanticTokensBuilder = SemanticTokensBuilder;
|
||||
Reference in New Issue
Block a user