wip: restructure

This commit is contained in:
Noah Santschi-Cooney 2022-08-14 12:13:20 +01:00
parent d8d77ac600
commit 786e09bdcf
No known key found for this signature in database
GPG key ID: 3B22282472C8AE48
105 changed files with 5156 additions and 3268 deletions

1
.vscode/launch.json vendored
View file

@ -4,6 +4,7 @@
"configurations": [
{
"type": "extensionHost",
"trace": true,
"request": "launch",
"name": "Launch Client",
"runtimeExecutable": "${execPath}",

152
client/package-lock.json generated
View file

@ -10,14 +10,14 @@
"adm-zip": "^0.5.9",
"encoding": "^0.1.13",
"node-fetch": "^2.6.7",
"vscode-languageclient": "^6.1.4"
"vscode-languageclient": "^8.0.0"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^21.0.2",
"@rollup/plugin-node-resolve": "^13.1.3",
"@types/adm-zip": "^0.4.34",
"@types/node-fetch": "^2.6.1",
"@types/vscode": "^1.65.0",
"@types/vscode": "^1.70.0",
"rollup": "^2.70.1"
}
},
@ -134,9 +134,9 @@
}
},
"node_modules/@types/vscode": {
"version": "1.65.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.65.0.tgz",
"integrity": "sha512-wQhExnh2nEzpjDMSKhUvnNmz3ucpd3E+R7wJkOhBNK3No6fG3VUdmVmMOKD0A8NDZDDDiQcLNxe3oGmX5SjJ5w==",
"version": "1.70.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.70.0.tgz",
"integrity": "sha512-3/9Fz0F2eBgwciazc94Ien+9u1elnjFg9YAhvAb3qDy/WeFWD9VrOPU7CIytryOVUdbxus8uzL4VZYONA0gDtA==",
"dev": true
},
"node_modules/adm-zip": {
@ -156,14 +156,12 @@
"node_modules/balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@ -202,8 +200,7 @@
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"node_modules/deepmerge": {
"version": "4.2.2",
@ -362,6 +359,17 @@
"@types/estree": "*"
}
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/magic-string": {
"version": "0.25.7",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz",
@ -396,7 +404,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
@ -491,11 +498,17 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"version": "7.3.7",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz",
"integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/sourcemap-codec": {
@ -510,38 +523,39 @@
"integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
},
"node_modules/vscode-jsonrpc": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.0.1.tgz",
"integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A==",
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz",
"integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==",
"engines": {
"node": ">=8.0.0 || >=10.0.0"
"node": ">=14.0.0"
}
},
"node_modules/vscode-languageclient": {
"version": "6.1.4",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.1.4.tgz",
"integrity": "sha512-EUOU+bJu6axmt0RFNo3nrglQLPXMfanbYViJee3Fbn2VuQoX0ZOI4uTYhSRvYLP2vfwTP/juV62P/mksCdTZMA==",
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz",
"integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==",
"dependencies": {
"semver": "^6.3.0",
"vscode-languageserver-protocol": "3.15.3"
"minimatch": "^3.0.4",
"semver": "^7.3.5",
"vscode-languageserver-protocol": "3.17.2"
},
"engines": {
"vscode": "^1.41.0"
"vscode": "^1.67.0"
}
},
"node_modules/vscode-languageserver-protocol": {
"version": "3.15.3",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.3.tgz",
"integrity": "sha512-zrMuwHOAQRhjDSnflWdJG+O2ztMWss8GqUUB8dXLR/FPenwkiBNkMIJJYfSN6sgskvsF0rHAoBowNQfbyZnnvw==",
"version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz",
"integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==",
"dependencies": {
"vscode-jsonrpc": "^5.0.1",
"vscode-languageserver-types": "3.15.1"
"vscode-jsonrpc": "8.0.2",
"vscode-languageserver-types": "3.17.2"
}
},
"node_modules/vscode-languageserver-types": {
"version": "3.15.1",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz",
"integrity": "sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ=="
"version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz",
"integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA=="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
@ -562,6 +576,11 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
},
"dependencies": {
@ -659,9 +678,9 @@
}
},
"@types/vscode": {
"version": "1.65.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.65.0.tgz",
"integrity": "sha512-wQhExnh2nEzpjDMSKhUvnNmz3ucpd3E+R7wJkOhBNK3No6fG3VUdmVmMOKD0A8NDZDDDiQcLNxe3oGmX5SjJ5w==",
"version": "1.70.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.70.0.tgz",
"integrity": "sha512-3/9Fz0F2eBgwciazc94Ien+9u1elnjFg9YAhvAb3qDy/WeFWD9VrOPU7CIytryOVUdbxus8uzL4VZYONA0gDtA==",
"dev": true
},
"adm-zip": {
@ -678,14 +697,12 @@
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@ -715,8 +732,7 @@
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"deepmerge": {
"version": "4.2.2",
@ -844,6 +860,14 @@
"@types/estree": "*"
}
},
"lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"requires": {
"yallist": "^4.0.0"
}
},
"magic-string": {
"version": "0.25.7",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz",
@ -872,7 +896,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
@ -935,9 +958,12 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
"version": "7.3.7",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz",
"integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==",
"requires": {
"lru-cache": "^6.0.0"
}
},
"sourcemap-codec": {
"version": "1.4.8",
@ -951,32 +977,33 @@
"integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
},
"vscode-jsonrpc": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.0.1.tgz",
"integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A=="
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz",
"integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ=="
},
"vscode-languageclient": {
"version": "6.1.4",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.1.4.tgz",
"integrity": "sha512-EUOU+bJu6axmt0RFNo3nrglQLPXMfanbYViJee3Fbn2VuQoX0ZOI4uTYhSRvYLP2vfwTP/juV62P/mksCdTZMA==",
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz",
"integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==",
"requires": {
"semver": "^6.3.0",
"vscode-languageserver-protocol": "3.15.3"
"minimatch": "^3.0.4",
"semver": "^7.3.5",
"vscode-languageserver-protocol": "3.17.2"
}
},
"vscode-languageserver-protocol": {
"version": "3.15.3",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.3.tgz",
"integrity": "sha512-zrMuwHOAQRhjDSnflWdJG+O2ztMWss8GqUUB8dXLR/FPenwkiBNkMIJJYfSN6sgskvsF0rHAoBowNQfbyZnnvw==",
"version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz",
"integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==",
"requires": {
"vscode-jsonrpc": "^5.0.1",
"vscode-languageserver-types": "3.15.1"
"vscode-jsonrpc": "8.0.2",
"vscode-languageserver-types": "3.17.2"
}
},
"vscode-languageserver-types": {
"version": "3.15.1",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz",
"integrity": "sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ=="
"version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz",
"integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA=="
},
"webidl-conversions": {
"version": "3.0.1",
@ -997,6 +1024,11 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}

View file

@ -9,14 +9,14 @@
"adm-zip": "^0.5.9",
"encoding": "^0.1.13",
"node-fetch": "^2.6.7",
"vscode-languageclient": "^6.1.4"
"vscode-languageclient": "^8.0.0"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^21.0.2",
"@rollup/plugin-node-resolve": "^13.1.3",
"@types/adm-zip": "^0.4.34",
"@types/node-fetch": "^2.6.1",
"@types/vscode": "^1.65.0",
"@types/vscode": "^1.70.0",
"rollup": "^2.70.1"
}
}

View file

@ -1,6 +1,6 @@
import path = require('path')
import * as vscode from 'vscode'
import * as lsp from 'vscode-languageclient'
import * as lsp from 'vscode-languageclient/node'
import { Extension } from './extension'
import { log } from './log'
@ -10,7 +10,7 @@ export function generateGraphDot(e: Extension): Command {
return async () => {
await e.lspClient.sendRequest(lsp.ExecuteCommandRequest.type.method, {
command: 'graphDot',
arguments: [vscode.workspace.workspaceFolders[0].uri.path],
arguments: [vscode.window.activeTextEditor.document.uri.path],
})
}
}
@ -26,6 +26,7 @@ export function restartExtension(e: Extension): Command {
export function virtualMergedDocument(e: Extension): Command {
const getVirtualDocument = async (path: string): Promise<string | null> => {
let content: string = ''
log.info(path)
try {
content = await e.lspClient.sendRequest<string>(lsp.ExecuteCommandRequest.type.method, {
command: 'virtualMerge',

View file

@ -1,6 +1,6 @@
import { mkdirSync, promises as fs } from 'fs'
import * as vscode from 'vscode'
import * as lsp from 'vscode-languageclient'
import * as lsp from 'vscode-languageclient/node'
import * as commands from './commands'
import { log } from './log'
import { LanguageClient } from './lspClient'

View file

@ -1,11 +1,12 @@
import { inspect } from 'util'
import * as vscode from 'vscode'
export const lspOutputChannel = vscode.window.createOutputChannel('Minecraft Shaders Language Server')
export const lspOutputChannel = vscode.window.createOutputChannel('Minecraft Shaders LSP - Server')
export const traceOutputChannel = vscode.window.createOutputChannel('Minecraft Shaders LSP - Trace')
// from rust-analyzer https://github.com/rust-analyzer/rust-analyzer/blob/ef223b9e6439c228e0be49861efd2067c0b22af4/editors/code/src/util.ts
export const log = new class {
readonly output = vscode.window.createOutputChannel('Minecraft Shaders');
readonly output = vscode.window.createOutputChannel('Minecraft Shaders LSP - Client');
// Hint: the type [T, ...T[]] means a non-empty array
debug(...msg: [unknown, ...unknown[]]): void {

View file

@ -1,18 +1,35 @@
import { ChildProcess, spawn } from 'child_process'
import { ConfigurationTarget, workspace } from 'vscode'
import * as lsp from 'vscode-languageclient'
import * as lsp from 'vscode-languageclient/node'
import { PublishDiagnosticsNotification, StreamInfo, TelemetryEventNotification } from 'vscode-languageclient/node'
import { Extension } from './extension'
import { log, lspOutputChannel } from './log'
import { ConfigUpdateParams, statusMethod, StatusParams, updateConfigMethod } from './lspExt'
import { log, lspOutputChannel, traceOutputChannel } from './log'
import { statusMethod, StatusParams } from './lspExt'
export class LanguageClient extends lsp.LanguageClient {
private extension: Extension
constructor(ext: Extension, lspBinary: string, filewatcherGlob: string) {
super('vscode-mc-shader', 'VSCode MC Shader', {
command: lspBinary
}, {
const serverOptions = () => new Promise<ChildProcess>((resolve, reject) => {
const childProcess = spawn(lspBinary, {
env: {
'RUST_BACKTRACE': 'true',
...process.env,
}
})
childProcess.stderr.on('data', (data: Buffer) => {
lspOutputChannel.appendLine(data.toString().trimRight())
})
childProcess.on('exit', (code, signal) => {
lspOutputChannel.appendLine(`⚠️⚠️⚠️ Language server exited ` + (signal ? `from signal ${signal}` : `with exit code ${code}`) + ' ⚠️⚠️⚠️')
})
resolve(childProcess)
})
super('mcglsl', '', serverOptions, {
traceOutputChannel: traceOutputChannel,
diagnosticCollectionName: 'mcglsl',
documentSelector: [{ scheme: 'file', language: 'glsl' }],
outputChannel: lspOutputChannel,
synchronize: {
configurationSection: 'mcglsl',
fileEvents: workspace.createFileSystemWatcher(filewatcherGlob)
@ -25,17 +42,27 @@ export class LanguageClient extends lsp.LanguageClient {
}
public startServer = async (): Promise<LanguageClient> => {
this.extension.context.subscriptions.push(this.start())
// this.extension.context.subscriptions.push(this.start())
this.setTrace(lsp.Trace.Verbose)
await this.onReady()
this.extension.context.subscriptions.push(this.onNotification(PublishDiagnosticsNotification.type, (p) => {
log.error(JSON.stringify(p))
}))
this.extension.context.subscriptions.push(this.onNotification(TelemetryEventNotification.type, this.onStatusChange))
this.onNotification(updateConfigMethod, this.onUpdateConfig)
this.onNotification(statusMethod, this.onStatusChange)
await this.start()
// await this.onReady()
console.log('banana')
return this
}
onStatusChange = (params: StatusParams) => {
onStatusChange = (params: {
status: 'loading' | 'ready' | 'failed' | 'clear'
message: string
icon: string
}) => {
log.info('bananan')
switch (params.status) {
case 'loading':
case 'ready':
@ -47,10 +74,4 @@ export class LanguageClient extends lsp.LanguageClient {
break
}
}
onUpdateConfig = (params: ConfigUpdateParams) => {
for (const kv of params.kv) {
workspace.getConfiguration().update('mcglsl.' + kv.key, kv.value, ConfigurationTarget.Global)
}
}
}

View file

@ -1,4 +1,4 @@
import * as lsp from 'vscode-languageclient'
import * as lsp from 'vscode-languageclient/node'
export type StatusParams = {
status: 'loading' | 'ready' | 'failed' | 'clear'
@ -6,11 +6,10 @@ export type StatusParams = {
icon: string
}
export const statusMethod = 'mc-glsl/status'
export const status = new lsp.NotificationType<StatusParams>(statusMethod)
export const statusMethod = new lsp.NotificationType<StatusParams>('mc-glsl/status')
export const updateConfigMethod = 'mc-glsl/updateConfig'
/* export const updateConfigMethod = 'mc-glsl/updateConfig'
export type ConfigUpdateParams = {
kv: { key: string, value: string }[]
}
} */

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 24 KiB

After

Width:  |  Height:  |  Size: 24 KiB

Before After
Before After

12
package-lock.json generated
View file

@ -1805,9 +1805,9 @@
}
},
"node_modules/minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/mkdirp-classic": {
@ -4241,9 +4241,9 @@
}
},
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"mkdirp-classic": {

802
server/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,19 @@
[package]
name = "filesystem"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
path-slash = "0.1"
anyhow = "1.0"
url = "2.2"
percent-encoding = "2.1"
serde_json = "1.0"
logging = { path = "../logging" }
tokio = { version = "1.18", features = ["fs"]}
regex = "1.4"
lazy_static = "1.4"

View file

@ -0,0 +1,6 @@
mod url_norm;
mod source_norm;
mod top_levels;
pub use url_norm::*;
pub use source_norm::*;
pub use top_levels::*;

View file

@ -0,0 +1,41 @@
use anyhow::{anyhow, Result};
#[cfg(not(test))]
use std::fs;
use std::{ops::Deref, path::Path};
#[cfg(test)]
use {
logging::{logger, FutureExt},
tokio::fs,
};
/// Denotes a string with any `\r\n` replaced with `\n`
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct LFString(String);

impl LFString {
    /// Reads the file at `path` and returns its contents with every CRLF
    /// sequence normalized to a lone LF.
    ///
    /// Errors from the underlying read are wrapped with the offending path
    /// for context.
    pub async fn read<P: AsRef<Path>>(path: P) -> Result<LFString> {
        // NOTE(review): under cfg(test) this awaits tokio's async fs with the
        // scoped logger attached, but non-test builds call the *blocking*
        // std::fs::read_to_string inside an async fn — confirm this inversion
        // is intentional and won't stall the runtime on large files.
        #[cfg(test)]
        let read_result = fs::read_to_string(&path).with_logger(logger()).await;
        #[cfg(not(test))]
        let read_result = fs::read_to_string(&path);
        let source = read_result.map_err(|e| anyhow!("error reading {:?}: {}", path.as_ref(), e))?;
        Ok(LFString(source.replace("\r\n", "\n")))
    }

    /// Creates an empty `LFString` with at least `capacity` bytes preallocated.
    pub fn with_capacity(capacity: usize) -> Self {
        LFString(String::with_capacity(capacity))
    }

    /// Wraps `string` without scanning for `\r\n`; the caller is responsible
    /// for upholding the LF-only invariant implied by the type.
    pub fn from_unchecked(string: String) -> Self {
        LFString(string)
    }
}

impl Deref for LFString {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

View file

@ -0,0 +1,65 @@
use std::collections::HashSet;
use lazy_static::lazy_static;
use regex::Regex;
use crate::NormalizedPathBuf;
lazy_static! {
    // Matches a path that begins with the shaderpack "shaders" directory,
    // optionally followed by a per-dimension world folder such as
    // "world0" or "world-1". Anchored at the start, so callers are expected
    // to pass paths relative to the pack root — TODO confirm at call sites.
    static ref RE_WORLD_FOLDER: Regex = Regex::new(r#"^shaders(/world-?\d+)?"#).unwrap();
    // The set of file names treated as top-level shader program entry points
    // (composite/deferred/prepare/shadowcomp families plus their 99 numbered
    // variants, the gbuffers_* programs, shadow programs and `final`), for
    // each of the four stage extensions. Capacity 1716 = 429 names per
    // extension × 4 extensions, so the set never rehashes while filling.
    static ref TOPLEVEL_FILES: HashSet<String> = {
        let mut set = HashSet::with_capacity(1716);
        for ext in ["fsh", "vsh", "gsh", "csh"] {
            // Unnumbered base programs.
            set.insert(format!("composite.{}", ext));
            set.insert(format!("deferred.{}", ext));
            set.insert(format!("prepare.{}", ext));
            set.insert(format!("shadowcomp.{}", ext));
            // Numbered variants, e.g. composite1..composite99.
            for i in 1..=99 {
                set.insert(format!("composite{}.{}", i, ext));
                set.insert(format!("deferred{}.{}", i, ext));
                set.insert(format!("prepare{}.{}", i, ext));
                set.insert(format!("shadowcomp{}.{}", i, ext));
            }
            set.insert(format!("composite_pre.{}", ext));
            set.insert(format!("deferred_pre.{}", ext));
            set.insert(format!("final.{}", ext));
            // gbuffers_* geometry programs.
            set.insert(format!("gbuffers_armor_glint.{}", ext));
            set.insert(format!("gbuffers_basic.{}", ext));
            set.insert(format!("gbuffers_beaconbeam.{}", ext));
            set.insert(format!("gbuffers_block.{}", ext));
            set.insert(format!("gbuffers_clouds.{}", ext));
            set.insert(format!("gbuffers_damagedblock.{}", ext));
            set.insert(format!("gbuffers_entities.{}", ext));
            set.insert(format!("gbuffers_entities_glowing.{}", ext));
            set.insert(format!("gbuffers_hand.{}", ext));
            set.insert(format!("gbuffers_hand_water.{}", ext));
            set.insert(format!("gbuffers_item.{}", ext));
            set.insert(format!("gbuffers_line.{}", ext));
            set.insert(format!("gbuffers_skybasic.{}", ext));
            set.insert(format!("gbuffers_skytextured.{}", ext));
            set.insert(format!("gbuffers_spidereyes.{}", ext));
            set.insert(format!("gbuffers_terrain.{}", ext));
            set.insert(format!("gbuffers_terrain_cutout.{}", ext));
            set.insert(format!("gbuffers_terrain_cutout_mip.{}", ext));
            set.insert(format!("gbuffers_terrain_solid.{}", ext));
            set.insert(format!("gbuffers_textured.{}", ext));
            set.insert(format!("gbuffers_textured_lit.{}", ext));
            set.insert(format!("gbuffers_water.{}", ext));
            set.insert(format!("gbuffers_weather.{}", ext));
            // Shadow pass programs.
            set.insert(format!("shadow.{}", ext));
            set.insert(format!("shadow_cutout.{}", ext));
            set.insert(format!("shadow_solid.{}", ext));
        }
        set
    };
}
/// Reports whether `path` names a top-level shader program: a recognized
/// program file sitting directly in `shaders/` or in a single
/// `shaders/world<N>/` dimension folder.
pub fn is_top_level(path: &NormalizedPathBuf) -> bool {
    let as_str = path.to_string();
    // Must live under the pack's shaders dir (optionally one world folder deep).
    if !RE_WORLD_FOLDER.is_match(&as_str) {
        return false;
    }
    let segments: Vec<&str> = as_str.split('/').collect();
    // Depth 2 => shaders/<file>; depth 3 => shaders/world<N>/<file>.
    match segments.len() {
        depth @ (2 | 3) => TOPLEVEL_FILES.contains(segments[depth - 1]),
        _ => false,
    }
}

View file

@ -0,0 +1,151 @@
use std::{
fmt::{Display, Debug},
path::{Path, PathBuf},
};
use anyhow::Result;
use logging::trace;
use path_slash::PathBufExt;
use serde_json::value::Value;
use url::Url;
/// A `PathBuf` whose separators have been normalized via `path-slash`,
/// giving a consistent representation across Windows and Unix.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NormalizedPathBuf(PathBuf);

impl NormalizedPathBuf {
    /// Appends `path` and re-normalizes the separators of the result.
    /// Panics if the joined path is not valid UTF-8.
    pub fn join(&self, path: impl Into<PathBuf>) -> NormalizedPathBuf {
        NormalizedPathBuf(PathBuf::from_slash(self.0.join(path.into()).to_str().unwrap()))
    }

    /// Returns the parent directory, or `None` at a filesystem root.
    pub fn parent(&self) -> Option<NormalizedPathBuf> {
        self.0.parent().map(Into::into)
    }

    /// Returns the file extension as UTF-8, if there is one.
    pub fn extension(&self) -> Option<&str> {
        self.0.extension().and_then(|e| e.to_str())
    }

    /// Strips `prefix` from the front of this path.
    /// Panics if `prefix` is not actually a prefix of `self`.
    pub fn strip_prefix(&self, prefix: &Self) -> NormalizedPathBuf {
        self.0.strip_prefix(prefix.clone().0).unwrap().into()
    }

    /// Returns true if the path exists on disk (touches the filesystem).
    pub fn exists(&self) -> bool {
        self.0.exists()
    }
}
impl Debug for NormalizedPathBuf {
    // Delegates to the inner PathBuf's Debug output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{:?}", self.0))
    }
}

impl AsRef<Path> for NormalizedPathBuf {
    fn as_ref(&self) -> &Path {
        self.0.as_path()
    }
}

impl AsRef<PathBuf> for NormalizedPathBuf {
    fn as_ref(&self) -> &PathBuf {
        &self.0
    }
}

impl From<&NormalizedPathBuf> for PathBuf {
    // Re-runs slash normalization; panics on non-UTF-8 paths.
    fn from(p: &NormalizedPathBuf) -> Self {
        PathBuf::from_slash(p.0.to_str().unwrap())
    }
}

impl From<&Path> for NormalizedPathBuf {
    fn from(p: &Path) -> Self {
        // TODO: is this right??
        PathBuf::from_slash(p.to_str().unwrap()).into()
    }
}

impl From<PathBuf> for NormalizedPathBuf {
    fn from(p: PathBuf) -> Self {
        // don't use p.as_path().into(), it'll cause infinite recursion with above impl
        p.to_str().unwrap().into()
    }
}

impl From<&str> for NormalizedPathBuf {
    fn from(s: &str) -> Self {
        // TODO: is this right??
        NormalizedPathBuf(PathBuf::from_slash(s))
    }
}
impl logging::Value for NormalizedPathBuf {
    // Logs the path as a plain UTF-8 string; panics on non-UTF-8 paths.
    fn serialize(&self, record: &logging::Record, key: logging::Key, serializer: &mut dyn logging::Serializer) -> logging::Result {
        self.0.to_str().unwrap().serialize(record, key, serializer)
    }
}

impl Display for NormalizedPathBuf {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0.display())
    }
}

impl From<Url> for NormalizedPathBuf {
    /// Windows: URL paths carry a leading '/' before the drive letter
    /// (e.g. "/C:/..."), which is stripped before percent-decoding.
    /// Panics if the URL path lacks that leading slash or is not UTF-8.
    #[cfg(target_family = "windows")]
    fn from(u: Url) -> Self {
        let path = PathBuf::from_slash(
            percent_encoding::percent_decode_str(u.path().strip_prefix('/').unwrap())
                .decode_utf8()
                .unwrap(),
        );
        trace!("converted win path from url"; "old" => u.as_str(), "new" => path.to_str().unwrap());
        NormalizedPathBuf(path)
    }

    /// Unix: the URL path is used as-is after percent-decoding.
    #[cfg(target_family = "unix")]
    fn from(u: Url) -> Self {
        let path = PathBuf::from_slash(percent_encoding::percent_decode_str(u.path()).decode_utf8().unwrap());
        trace!("converted unix path from url"; "old" => u.as_str(), "new" => path.to_str().unwrap());
        NormalizedPathBuf(path)
    }
}
impl TryFrom<&Value> for NormalizedPathBuf {
    type Error = anyhow::Error;

    /// Converts a JSON string value into a path. `v.to_string()` yields the
    /// value with surrounding quotes, hence the trim of '"' on both ends.
    /// Windows additionally strips the leading '/' that precedes the drive
    /// letter in URL-style paths (panics if it is absent).
    #[cfg(target_family = "windows")]
    fn try_from(v: &Value) -> Result<Self> {
        if !v.is_string() {
            return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v));
        }
        let path = v.to_string();
        let path = PathBuf::from_slash(
            percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"').strip_prefix('/').unwrap())
                .decode_utf8()?,
        );
        trace!("converted win path from json"; "old" => v.to_string(), "new" => path.to_str().unwrap());
        Ok(NormalizedPathBuf(path))
    }

    /// Unix variant: same as above minus the drive-letter slash handling.
    #[cfg(target_family = "unix")]
    fn try_from(v: &serde_json::value::Value) -> Result<Self> {
        if !v.is_string() {
            return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v));
        }
        let path = v.to_string();
        let path =
            PathBuf::from_slash(percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"')).decode_utf8()?);
        trace!("converted unix path from json"; "old" => v.to_string(), "new" => path.to_str().unwrap());
        Ok(NormalizedPathBuf(path))
    }
}

25
server/graph/Cargo.toml Normal file
View file

@ -0,0 +1,25 @@
[package]
name = "graph"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
anyhow = "1.0"
petgraph = "0.6"
logging = { path = "../logging" }
logging_macro = { path = "../logging_macro" }
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
slog-scope = "4.4"
sourcefile = { path = "../sourcefile" }
tower-lsp = "0.17.0"
thiserror = "1.0"
[dev-dependencies]
tempdir = "0.3"
fs_extra = "1.2"
hamcrest2 = "*"
pretty_assertions = "1.1"

342
server/graph/src/dfs.rs Normal file
View file

@ -0,0 +1,342 @@
use crate::{graph::CachedStableGraph, FilialTuple};
use petgraph::stable_graph::{NodeIndex, StableDiGraph};
use std::{fmt::Debug, hash::Hash};
use anyhow::Result;
/// Per-node bookkeeping for the DFS path stack, used to decide when a node's
/// subtree is fully explored and it should be popped off the current path.
struct VisitCount {
    node: NodeIndex,
    // how many times we have backtracked to this node
    // after exhausting a DFS along one of this node's
    // outgoing edges
    touch: usize,
    // how many times we have to backtrack to this node
    // after exhausting a DFS along one of this node's
    // outgoing edges before we backtrack to the parent
    // node of this node that we came from during the
    // traversal. Aint that a mouthful.
    children: usize,
}
/// Performs a depth-first search with duplicates: a node reachable along
/// several paths is yielded once per path rather than once overall.
pub struct Dfs<'a, K, V>
where
    K: Hash + Clone + ToString + Eq + Debug,
    V: Ord + Copy,
{
    graph: &'a CachedStableGraph<K, V>,
    // TODO: how can we collapse these
    // Nodes discovered but not yet yielded (classic DFS work stack).
    stack: Vec<NodeIndex>,
    // The current root-to-node path, used both to attribute parents and to
    // detect include cycles. See VisitCount for the backtracking counters.
    cycle: Vec<VisitCount>,
}
impl<'a, K, V> Dfs<'a, K, V>
where
    K: Hash + Clone + ToString + Eq + Debug,
    V: Ord + Copy,
{
    /// Creates a traversal over `graph` rooted at `start`.
    pub fn new(graph: &'a CachedStableGraph<K, V>, start: NodeIndex) -> Self {
        Dfs {
            stack: vec![start],
            graph,
            cycle: Vec::new(),
        }
    }

    /// Pops exhausted nodes off the current path after a leaf is reached,
    /// stopping at the deepest ancestor that still has unexplored children.
    fn reset_path_to_branch(&mut self) {
        while let Some(par) = self.cycle.last_mut() {
            par.touch += 1;
            if par.touch > par.children {
                self.cycle.pop();
            } else {
                break;
            }
        }
    }

    /// Errors if any of `children` already appears on the current path,
    /// which would mean the include graph contains a cycle.
    fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), CycleError> {
        for prev in &self.cycle {
            for child in children {
                if prev.node == *child {
                    let cycle_nodes: Vec<NodeIndex> = self.cycle.iter().map(|n| n.node).collect();
                    return Err(CycleError::new(&cycle_nodes, *child, self.graph));
                }
            }
        }
        Ok(())
    }
}
impl<'a, K, V> Iterator for Dfs<'a, K, V>
where
    K: Hash + Clone + ToString + Eq + Debug,
    V: Ord + Copy,
{
    type Item = Result<FilialTuple<NodeIndex>, CycleError>;

    /// Yields the next (child, parent) pair in pre-order, or a CycleError
    /// the moment a cycle is detected along the current path.
    fn next(&mut self) -> Option<Result<FilialTuple<NodeIndex>, CycleError>> {
        // Parent is whatever is on top of the path *before* we push the
        // node we are about to yield.
        let parent = self.cycle.last().map(|p| p.node);
        if let Some(child) = self.stack.pop() {
            self.cycle.push(VisitCount {
                node: child,
                children: Into::<&StableDiGraph<K, V>>::into(self.graph).edges(child).count(),
                touch: 1,
            });
            // Reversed so that children are popped off the stack (LIFO) in
            // their original order.
            let children: Vec<_> = self.graph.get_all_children(child).rev().collect();
            if !children.is_empty() {
                if let Err(e) = self.check_for_cycle(&children) {
                    return Some(Err(e));
                }
                for child in children {
                    self.stack.push(child);
                }
            } else {
                // Leaf: unwind the path back to the nearest branch point.
                self.reset_path_to_branch();
            }
            return Some(Ok(FilialTuple { child, parent }));
        }
        None
    }
}
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
use std::{error::Error as StdError, fmt::Display};
/// An include cycle, stored as the stringified node keys along the cycle
/// path with the re-encountered node appended last.
#[derive(Debug)]
pub struct CycleError(Vec<String>);

impl StdError for CycleError {}

impl CycleError {
    /// Builds the error from the on-path `nodes` plus `current_node`, the
    /// node whose re-discovery closed the cycle. Keys are resolved through
    /// the graph and stringified for display.
    pub fn new<K, V>(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph<K, V>) -> Self
    where
        K: Hash + Clone + ToString + Eq + Debug,
        V: Ord + Copy,
    {
        let mut resolved_nodes: Vec<K> = nodes.iter().map(|i| graph[*i].clone()).collect();
        resolved_nodes.push(graph[current_node].clone());
        CycleError(resolved_nodes.into_iter().map(|p| p.to_string()).collect())
    }
}
impl Display for CycleError {
    /// Renders the cycle as "A imports\nB, which imports\n...".
    ///
    /// Fix over the original: the old body indexed `self.0[0]` and sliced
    /// `self.0[1..len - 1]` unconditionally, panicking whenever the list held
    /// fewer than two entries (possible when `CycleError::new` is given an
    /// empty node slice). Slice patterns make the short cases safe while
    /// producing byte-identical output for two or more entries; writing
    /// straight to the formatter also drops the intermediate String.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.0.as_slice() {
            [] => f.write_str("Include cycle detected"),
            [only] => write!(f, "Include cycle detected:\n{:?}", only),
            [first, middle @ .., last] => {
                write!(f, "Include cycle detected:\n{:?} imports ", first)?;
                for p in middle {
                    write!(f, "\n{:?}, which imports ", p)?;
                }
                write!(f, "\n{:?}", last)
            }
        }
    }
}
impl From<CycleError> for Diagnostic {
    /// Surfaces the cycle to the editor as an error diagnostic pinned to a
    /// wide range on the first line of the offending file.
    fn from(e: CycleError) -> Diagnostic {
        let first_line = Range::new(Position::new(0, 0), Position::new(0, 500));
        Diagnostic {
            range: first_line,
            severity: Some(DiagnosticSeverity::ERROR),
            code: None,
            code_description: None,
            source: Some("mcglsl".into()),
            message: e.into(),
            related_information: None,
            tags: None,
            data: None,
        }
    }
}
impl From<CycleError> for String {
    /// Flattens the cycle into its Display rendering.
    fn from(e: CycleError) -> String {
        e.to_string()
    }
}
/// Tests for the cycle-detecting depth-first traversal over the include graph.
#[cfg(test)]
mod dfs_test {
    use hamcrest2::prelude::*;
    use hamcrest2::{assert_that, ok};
    use petgraph::stable_graph::StableDiGraph;
    use petgraph::{algo::is_cyclic_directed, graph::NodeIndex};
    use crate::dfs;
    use crate::graph::CachedStableGraph;

    #[test]
    #[logging_macro::scope]
    fn test_graph_dfs() {
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0".to_string());
            let idx1 = graph.add_node(&"1".to_string());
            let idx2 = graph.add_node(&"2".to_string());
            let idx3 = graph.add_node(&"3".to_string());
            // Edge weights are include positions; they determine child visit order.
            graph.add_edge(idx0, idx1, 2);
            graph.add_edge(idx0, idx2, 3);
            graph.add_edge(idx1, idx3, 5);
            let dfs = dfs::Dfs::new(&graph, idx0);
            let mut collection = Vec::new();
            // An acyclic graph must yield only Ok tuples.
            for i in dfs {
                assert_that!(&i, ok());
                collection.push(i.unwrap());
            }
            let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
            let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
            //   0
            //  / \
            // 1   2
            // |
            // 3
            let expected_nodes = vec![idx0, idx1, idx3, idx2];
            assert_eq!(expected_nodes, nodes);
            let expected_parents = vec![None, Some(idx0), Some(idx1), Some(idx0)];
            assert_eq!(expected_parents, parents);
            assert!(!is_cyclic_directed(Into::<&StableDiGraph<_, _>>::into(&graph)));
        }
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0".to_string());
            let idx1 = graph.add_node(&"1".to_string());
            let idx2 = graph.add_node(&"2".to_string());
            let idx3 = graph.add_node(&"3".to_string());
            let idx4 = graph.add_node(&"4".to_string());
            let idx5 = graph.add_node(&"5".to_string());
            let idx6 = graph.add_node(&"6".to_string());
            let idx7 = graph.add_node(&"7".to_string());
            graph.add_edge(idx0, idx1, 2);
            graph.add_edge(idx0, idx2, 3);
            graph.add_edge(idx1, idx3, 5);
            graph.add_edge(idx1, idx4, 6);
            graph.add_edge(idx2, idx4, 5);
            graph.add_edge(idx2, idx5, 4);
            graph.add_edge(idx3, idx6, 4);
            graph.add_edge(idx4, idx6, 4);
            graph.add_edge(idx6, idx7, 4);
            let dfs = dfs::Dfs::new(&graph, idx0);
            let mut collection = Vec::new();
            for i in dfs {
                assert_that!(&i, ok());
                collection.push(i.unwrap());
            }
            let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
            let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
            //    0
            //   / \
            //  1   2
            // / \ / \
            // 3   4  5
            //  \ /
            //   6 - 7
            // Nodes reachable via multiple parents (4, 6, 7) are deliberately
            // revisited once per distinct path, hence the duplicates below.
            let expected_nodes = vec![idx0, idx1, idx3, idx6, idx7, idx4, idx6, idx7, idx2, idx5, idx4, idx6, idx7];
            assert_eq!(expected_nodes, nodes);
            let expected_parents = vec![
                None,
                Some(idx0),
                Some(idx1),
                Some(idx3),
                Some(idx6),
                Some(idx1),
                Some(idx4),
                Some(idx6),
                Some(idx0),
                Some(idx2),
                Some(idx2),
                Some(idx4),
                Some(idx6),
            ];
            assert_eq!(expected_parents, parents);
            assert!(!is_cyclic_directed(Into::<&StableDiGraph<_, _>>::into(&graph)));
        }
    }

    #[test]
    #[logging_macro::scope]
    fn test_graph_dfs_cycle() {
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0".to_string());
            let idx1 = graph.add_node(&"1".to_string());
            let idx2 = graph.add_node(&"2".to_string());
            let idx3 = graph.add_node(&"3".to_string());
            let idx4 = graph.add_node(&"4".to_string());
            let idx5 = graph.add_node(&"5".to_string());
            let idx6 = graph.add_node(&"6".to_string());
            let idx7 = graph.add_node(&"7".to_string());
            graph.add_edge(idx0, idx1, 2);
            graph.add_edge(idx0, idx2, 3);
            graph.add_edge(idx1, idx3, 5);
            graph.add_edge(idx1, idx4, 6);
            graph.add_edge(idx2, idx4, 5);
            graph.add_edge(idx2, idx5, 4);
            graph.add_edge(idx3, idx6, 4);
            graph.add_edge(idx4, idx6, 4);
            graph.add_edge(idx6, idx7, 4);
            // The back-edge 7 -> 4 closes the cycle.
            graph.add_edge(idx7, idx4, 4);
            let mut dfs = dfs::Dfs::new(&graph, idx0);
            // The first five steps (0, 1, 3, 6, 7) precede the cycle and must succeed.
            for _ in 0..5 {
                if let Some(i) = dfs.next() {
                    assert_that!(&i, ok());
                }
            }
            //    0
            //   / \
            //  1   2
            // / \ / \
            // 3   4  5
            //  \ / \
            //   6 - 7
            let next = dfs.next().unwrap();
            assert_that!(next, err());
            assert!(is_cyclic_directed(Into::<&StableDiGraph<_, _>>::into(&graph)));
        }
        {
            // Minimal two-node cycle starting from inside the cycle.
            // NOTE(review): this scope only prints and asserts nothing — presumably
            // a smoke test that the iterator terminates; consider adding assertions.
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0".to_string());
            let idx1 = graph.add_node(&"1".to_string());
            graph.add_edge(idx0, idx1, 2);
            graph.add_edge(idx1, idx0, 2);
            let mut dfs = dfs::Dfs::new(&graph, idx1);
            println!("{:?}", dfs.next());
            println!("{:?}", dfs.next());
            println!("{:?}", dfs.next());
        }
    }
}

454
server/graph/src/graph.rs Normal file
View file

@ -0,0 +1,454 @@
use anyhow::format_err;
use anyhow::Result;
use petgraph::stable_graph::EdgeIndex;
use petgraph::stable_graph::NodeIndex;
use petgraph::stable_graph::StableDiGraph;
use petgraph::visit::EdgeRef;
use petgraph::Direction;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
use std::hash::Hash;
use std::ops::Index;
use std::ops::IndexMut;
/// Wraps a `StableDiGraph` with caching behaviour for node search by maintaining
/// an index for node value to node index and a reverse index.
/// This allows for **O(1)** lookup for a value if it exists, else **O(n)**.
pub struct CachedStableGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    // StableDiGraph is used as it allows for String node values, essential for
    // generating the GraphViz DOT render.
    pub graph: StableDiGraph<K, V>,
    // Forward index: node value -> node index. Populated eagerly by `add_node`
    // and lazily by `find_node`.
    cache: HashMap<K, NodeIndex>,
    // Maps a node index to its abstracted string representation.
    // Mainly used as the graph is based on NodeIndex. Only the name-based
    // test helpers read it, hence test-only.
    #[cfg(test)]
    reverse_index: HashMap<NodeIndex, K>,
}
impl<K, V> CachedStableGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    /// Creates an empty graph with empty lookup caches.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        CachedStableGraph {
            graph: StableDiGraph::new(),
            cache: HashMap::new(),
            #[cfg(test)]
            reverse_index: HashMap::new(),
        }
    }

    /// Number of nodes currently in the graph.
    #[inline]
    pub fn node_count(&self) -> usize {
        self.graph.node_count()
    }

    // #[inline]
    // pub fn inner(&self) -> &StableDiGraph<K, V> {
    //     &self.graph
    // }

    /// Iterates over the direct parents of `node` (sources of incoming edges).
    pub fn parents(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
        self.graph.edges_directed(node, Direction::Incoming).map(|e| e.source())
    }

    /// Returns the `NodeIndex` for a given graph node with the value of `name`
    /// and caches the result in the `HashMap`. Complexity is **O(1)** if the value
    /// is cached (which should always be the case), else **O(n)** where **n** is
    /// the number of node indices, as an exhaustive search must be done.
    pub fn find_node(&mut self, name: &K) -> Option<NodeIndex> {
        match self.cache.get(name) {
            Some(n) => Some(*n),
            None => {
                // If the string is not in cache, O(n) search the graph (i know...) and then cache the NodeIndex
                // for later
                let n = self.graph.node_indices().find(|n| self.graph[*n] == *name);
                if let Some(n) = n {
                    self.cache.insert(name.clone(), n);
                }
                n
            }
        }
    }

    /// Returns all child node indexes for a parent, in order of import. May include duplicates if a child
    /// is imported more than once into the parent.
    pub fn get_all_children(&self, parent: NodeIndex) -> impl DoubleEndedIterator<Item = NodeIndex> + '_ {
        self.get_all_edges_from(parent).map(|p| p.0)
    }

    /// Returns an iterator over all the edge values of type `V`'s between a parent and its child for all the
    /// positions that the child may be imported into the parent, in order of import.
    pub fn get_edges_between(&self, parent: NodeIndex, child: NodeIndex) -> impl DoubleEndedIterator<Item = V> + '_ {
        let mut edges = self
            .graph
            .edges(parent)
            .filter_map(move |edge| {
                let target = self.graph.edge_endpoints(edge.id()).unwrap().1;
                if target != child {
                    return None;
                }
                Some(self.graph[edge.id()])
            })
            .collect::<Vec<V>>();
        // Edge weights encode include positions; sorting yields import order.
        edges.sort();
        edges.into_iter()
    }

    /// Returns an iterator over all the `(NodeIndex, T)` tuples between a node and all its children, in order
    /// of import.
    pub fn get_all_edges_from(&self, parent: NodeIndex) -> impl DoubleEndedIterator<Item = (NodeIndex, V)> + '_ {
        let mut edges = self
            .graph
            .edges(parent)
            .map(|edge| {
                let child = self.graph.edge_endpoints(edge.id()).unwrap().1;
                (child, self.graph[edge.id()])
            })
            .collect::<Vec<_>>();
        // Sort by edge weight (include position) so children come out in import order.
        edges.sort_by(|x, y| x.1.cmp(&y.1));
        edges.into_iter()
    }

    // pub fn symmetric_closure(&self) {}

    /// Inserts `name` as a node, or returns the existing index if it was
    /// added before. Also records the value -> index mapping in the cache
    /// (and the test-only reverse index).
    pub fn add_node(&mut self, name: &K) -> NodeIndex {
        if let Some(idx) = self.cache.get(name) {
            return *idx;
        }
        let idx = self.graph.add_node(name.clone());
        self.cache.insert(name.to_owned(), idx);
        #[cfg(test)]
        self.reverse_index.insert(idx, name.to_owned());
        idx
    }

    /// Adds a directional edge of type `V` between `parent` and `child`.
    #[inline]
    pub fn add_edge(&mut self, parent: NodeIndex, child: NodeIndex, meta: V) -> EdgeIndex {
        self.graph.add_edge(parent, child, meta)
    }

    /// Removes the edge from `parent` to `child` carrying exactly the weight
    /// `position`; a no-op when no matching edge exists.
    #[inline]
    pub fn remove_edge(&mut self, parent: NodeIndex, child: NodeIndex, position: V) {
        self.graph
            .edges(parent)
            .find(|edge| self.graph.edge_endpoints(edge.id()).unwrap().1 == child && *edge.weight() == position)
            .map(|edge| edge.id())
            .and_then(|edge| self.graph.remove_edge(edge));
    }

    /// Direct children of `node` (outgoing neighbors), in petgraph's order.
    #[inline]
    pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
        self.graph.neighbors(node)
    }

    /// Direct parents of `node` (incoming neighbors), in petgraph's order.
    #[inline]
    pub fn parent_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
        self.graph.neighbors_directed(node, Direction::Incoming)
    }

    /// Key-based wrapper around [`Self::root_ancestors`]; errors when `path`
    /// does not name a known node.
    pub fn root_ancestors_for_key(&mut self, path: &K) -> Result<Option<Vec<NodeIndex>>> {
        let node = match self.find_node(path) {
            Some(n) => n,
            None => return Err(format_err!("node not found {:?}", path)),
        };
        Ok(self.root_ancestors(node))
    }

    /// Collects all ancestors of `node` that themselves have no parents
    /// (i.e. top-level files). Returns an empty `Vec` when `node` has no parents.
    #[inline]
    pub fn root_ancestors(&self, node: NodeIndex) -> Option<Vec<NodeIndex>> {
        let mut visited = HashSet::new();
        self.get_root_ancestors(node, node, &mut visited)
    }

    fn get_root_ancestors(&self, initial: NodeIndex, node: NodeIndex, visited: &mut HashSet<NodeIndex>) -> Option<Vec<NodeIndex>> {
        // Walked back around to the starting node: abandon this branch.
        // (visited is non-empty only after at least one level of recursion.)
        if node == initial && !visited.is_empty() {
            return None;
        }
        let parents: Vec<_> = self.parent_node_indexes(node).collect();
        let mut collection = Vec::with_capacity(parents.len());
        for ancestor in &parents {
            visited.insert(*ancestor);
        }
        // NOTE(review): `visited` is only consulted by the `initial` check above,
        // never before recursing. A parent cycle that does not pass through
        // `initial`, or a diamond of parents converging on one root, could
        // recurse unboundedly or report duplicate roots — confirm callers only
        // hand in acyclic import graphs.
        for ancestor in &parents {
            if self.parent_node_indexes(*ancestor).next().is_some() {
                collection.extend(self.get_root_ancestors(initial, *ancestor, visited).unwrap_or_default());
            } else {
                collection.push(*ancestor);
            }
        }
        Some(collection)
    }
}
impl<K, V> Index<NodeIndex> for CachedStableGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    type Output = K;

    /// Returns the key stored at `index`, delegating to the inner graph.
    #[inline]
    fn index(&self, index: NodeIndex) -> &Self::Output {
        self.graph.index(index)
    }
}
impl<K, V> IndexMut<NodeIndex> for CachedStableGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    /// Mutable access to the key stored at `index`.
    #[inline]
    fn index_mut(&mut self, index: NodeIndex) -> &mut Self::Output {
        &mut self.graph[index]
    }
}
/// Name-based helpers available to tests only; they rely on the test-only
/// `reverse_index` to translate node indexes back to their keys.
#[cfg(test)]
impl<K, V> CachedStableGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    /// Resolves the keys of all direct parents of `node`.
    fn parent_node_names(&self, node: NodeIndex) -> Vec<K> {
        self.parent_node_indexes(node)
            .map(|parent| self.reverse_index[&parent].clone())
            .collect()
    }

    /// Resolves the keys of all direct children of `node`.
    fn child_node_names(&self, node: NodeIndex) -> Vec<K> {
        self.child_node_indexes(node)
            .map(|child| self.reverse_index[&child].clone())
            .collect()
    }

    /// Removes the node keyed by `name`, if present, from both the cache and
    /// the underlying graph.
    fn remove_node(&mut self, name: &K) {
        if let Some(idx) = self.cache.remove(name) {
            self.graph.remove_node(idx);
        }
    }
}
/// Cheap borrow conversion so petgraph algorithms (e.g. `is_cyclic_directed`)
/// can operate directly on the wrapped graph.
impl<'a, K, V> From<&'a CachedStableGraph<K, V>> for &'a StableDiGraph<K, V>
where
    K: Hash + Clone + Eq + Debug,
    V: Ord + Copy,
{
    #[inline]
    fn from(val: &'a CachedStableGraph<K, V>) -> Self {
        &val.graph
    }
}
/// Tests for `CachedStableGraph`: basic connectivity, duplicate imports,
/// and root-ancestor collection.
#[cfg(test)]
mod graph_test {
    use petgraph::graph::NodeIndex;
    use crate::graph::CachedStableGraph;

    #[test]
    #[logging_macro::scope]
    fn test_graph_two_connected_nodes() {
        let mut graph = CachedStableGraph::new();
        let idx1 = graph.add_node(&"sample");
        let idx2 = graph.add_node(&"banana");
        graph.add_edge(idx1, idx2, 100);
        // Child lookups by name and by index must agree.
        let children = graph.child_node_names(idx1);
        assert_eq!(children.len(), 1);
        assert_eq!(children[0], "banana");
        let children: Vec<NodeIndex> = graph.child_node_indexes(idx1).collect();
        assert_eq!(children.len(), 1);
        assert_eq!(children[0], idx2);
        // Parent lookups: root has none, child has exactly one.
        let parents = graph.parent_node_names(idx1);
        assert_eq!(parents.len(), 0);
        let parents = graph.parent_node_names(idx2);
        assert_eq!(parents.len(), 1);
        assert_eq!(parents[0], "sample");
        let parents: Vec<_> = graph.parent_node_indexes(idx2).collect();
        assert_eq!(parents.len(), 1);
        assert_eq!(parents[0], idx1);
        let ancestors = graph.root_ancestors(idx2).unwrap();
        assert_eq!(ancestors.len(), 1);
        assert_eq!(ancestors[0], idx1);
        // A root node reports no ancestors.
        let ancestors = graph.root_ancestors(idx1).unwrap();
        assert_eq!(ancestors.len(), 0);
        // Removing the root leaves the child orphaned but present.
        graph.remove_node(&"sample");
        assert_eq!(graph.graph.node_count(), 1);
        assert!(graph.find_node(&"sample").is_none());
        let neighbors = graph.child_node_names(idx2);
        assert_eq!(neighbors.len(), 0);
    }

    #[test]
    #[logging_macro::scope]
    fn test_double_import() {
        let mut graph = CachedStableGraph::new();
        let idx0 = graph.add_node(&"0");
        let idx1 = graph.add_node(&"1");
        // Same child imported twice at different positions (edge weights).
        graph.add_edge(idx0, idx1, 200);
        graph.add_edge(idx0, idx1, 400);
        //     0
        //    / \
        //   1   1
        assert_eq!(2, graph.get_edges_between(idx0, idx1).count());
        // Edge values come back sorted by position.
        let mut edge_metas = graph.get_edges_between(idx0, idx1);
        assert_eq!(Some(200), edge_metas.next());
        assert_eq!(Some(400), edge_metas.next());
    }

    #[test]
    #[logging_macro::scope]
    fn test_collect_root_ancestors() {
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0");
            let idx1 = graph.add_node(&"1");
            let idx2 = graph.add_node(&"2");
            let idx3 = graph.add_node(&"3");
            graph.add_edge(idx0, idx1, 200);
            graph.add_edge(idx1, idx2, 300);
            graph.add_edge(idx3, idx1, 400);
            // 0  3
            //  |/
            //  1
            //  |
            //  2
            let roots = graph.root_ancestors(idx2).unwrap();
            assert_eq!(roots, vec![idx3, idx0]);
            let roots = graph.root_ancestors(idx1).unwrap();
            assert_eq!(roots, vec![idx3, idx0]);
            let roots = graph.root_ancestors(idx0).unwrap();
            assert_eq!(roots, vec![]);
            let roots = graph.root_ancestors(idx3).unwrap();
            assert_eq!(roots, vec![]);
        }
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0");
            let idx1 = graph.add_node(&"1");
            let idx2 = graph.add_node(&"2");
            let idx3 = graph.add_node(&"3");
            graph.add_edge(idx0, idx1, 200);
            graph.add_edge(idx0, idx2, 300);
            graph.add_edge(idx1, idx3, 500);
            //   0
            //  / \
            // 1   2
            // |
            // 3
            let roots = graph.root_ancestors(idx3).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx2).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx1).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx0).unwrap();
            assert_eq!(roots, vec![]);
        }
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0");
            let idx1 = graph.add_node(&"1");
            let idx2 = graph.add_node(&"2");
            let idx3 = graph.add_node(&"3");
            graph.add_edge(idx0, idx1, 200);
            graph.add_edge(idx2, idx3, 300);
            graph.add_edge(idx1, idx3, 500);
            // 0
            //  \
            // 2 1
            //  \ |
            //   3
            let roots = graph.root_ancestors(idx3).unwrap();
            assert_eq!(roots, vec![idx0, idx2]);
            let roots = graph.root_ancestors(idx2).unwrap();
            assert_eq!(roots, vec![]);
            let roots = graph.root_ancestors(idx1).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx0).unwrap();
            assert_eq!(roots, vec![]);
        }
        {
            let mut graph = CachedStableGraph::new();
            let idx0 = graph.add_node(&"0");
            let idx1 = graph.add_node(&"1");
            let idx2 = graph.add_node(&"2");
            let idx3 = graph.add_node(&"3");
            graph.add_edge(idx0, idx1, 200);
            graph.add_edge(idx1, idx2, 400);
            graph.add_edge(idx1, idx3, 600);
            // 0
            // |
            // 1
            // | \
            // 2  3
            let roots = graph.root_ancestors(idx3).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx2).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx1).unwrap();
            assert_eq!(roots, vec![idx0]);
            let roots = graph.root_ancestors(idx0).unwrap();
            assert_eq!(roots, vec![]);
        }
    }
}

18
server/graph/src/lib.rs Normal file
View file

@ -0,0 +1,18 @@
mod graph;
pub mod dfs;
pub use graph::*;
pub use petgraph::stable_graph::NodeIndex;
pub use petgraph::dot::Config;
pub use petgraph::dot;
/// FilialTuple represents a tuple (not really) of a child and any legitimate
/// parent. Parent can be nullable in the case of the child being a top level
/// node in the tree.
#[derive(Hash, PartialEq, Eq, Debug, Clone)]
pub struct FilialTuple<T> {
    /// The node itself.
    pub child: T,
    /// The node's parent for this traversal step; `None` for a root node.
    pub parent: Option<T>,
}

View file

@ -0,0 +1,29 @@
[package]
name = "include_merger"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
anyhow = "1.0"
tower-lsp = "0.17.0"
logging = { path = "../logging" }
logging_macro = { path = "../logging_macro" }
graph = { path = "../graph" }
filesystem = { path = "../filesystem" }
tokio = { version = "1.18", features = ["fs"]}
workspace_tree = { path = "../workspace_tree" }
sourcefile = { path = "../sourcefile" }
tree-sitter = "0.20.6"
tree-sitter-glsl = "0.1.2"
opengl = { path = "../opengl" }
[dev-dependencies]
tempdir = "0.3"
fs_extra = "1.2"
pretty_assertions = "1.2"

View file

@ -0,0 +1,13 @@
/// Block of `#define`s prepended to the merged shader source before it is
/// handed to the GLSL compiler (see `MergeViewBuilder::add_preamble`).
/// NOTE(review): the specific values (MC_VERSION, GL/GLSL versions, vendor,
/// quality settings) appear to mirror what OptiFine injects at runtime —
/// confirm against OptiFine's shader documentation before changing them.
pub const OPTIFINE_PREAMBLE: &str = r#"#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING"#;

View file

@ -0,0 +1,3 @@
mod merge_views;
mod consts;
pub use merge_views::*;

View file

@ -0,0 +1,572 @@
use std::cmp::min;
use std::collections::{HashMap, LinkedList, VecDeque};
use std::iter::Peekable;
use core::slice::Iter;
use filesystem::{LFString, NormalizedPathBuf};
use graph::FilialTuple;
use logging::debug;
use crate::consts;
use sourcefile::{IncludeLine, SourceFile, SourceMapper, Version};
/// Merges the source strings according to the nodes comprising a tree of imports into a GLSL source string
/// that can be handed off to the GLSL compiler.
pub struct MergeViewBuilder<'a> {
    /// Remaining (child, parent) pairs of the import tree, consumed in DFS order.
    nodes: Peekable<Iter<'a, FilialTuple<&'a SourceFile>>>,
    // sources: &'a HashMap<NormalizedPathBuf, LFString>,
    /// contains additionally inserted lines such as #line and other directives, preamble defines etc
    extra_lines: Vec<String>,
    // graph: &'a CachedStableGraph<NormalizedPathBuf, IncludeLine>,
    /// Maps paths to the numeric ids embedded in generated #line directives.
    source_mapper: &'a mut SourceMapper<NormalizedPathBuf>,
    /// holds the offset into the child which has been added to the merge list for a parent.
    /// A child can have multiple parents for a given tree, and be included multiple times
    /// by the same parent, hence we have to track it for a ((child, parent), line) tuple
    /// instead of just the child or (child, parent).
    last_offset_set: HashMap<FilialTuple<&'a NormalizedPathBuf>, usize>,
    /// holds, for any given filial tuple, the iterator yielding all the positions at which the child
    /// is included into the parent in line-sorted order. This is necessary for files that are imported
    /// more than once into the same parent, so we can easily get the next include position.
    parent_child_edge_iterator: HashMap<FilialTuple<&'a NormalizedPathBuf>, Box<(dyn Iterator<Item = IncludeLine> + 'a)>>,
    // #line directives need to be adjusted based on GPU vendor + document glsl version
    gpu_vendor: opengl::GPUVendor,
    document_glsl_version: sourcefile::Version,
}
impl<'a> MergeViewBuilder<'a> {
    /// Builds a merger over a DFS-ordered slice of (child, parent) pairs.
    /// `gpu_vendor` and `document_glsl_version` control #line numbering quirks.
    pub fn new(
        nodes: &'a [FilialTuple<&'a SourceFile>], source_mapper: &'a mut SourceMapper<NormalizedPathBuf>, gpu_vendor: opengl::GPUVendor,
        document_glsl_version: sourcefile::Version,
    ) -> Self {
        // NOTE(review): debug print left over from WIP — consider removing.
        println!("{}", nodes.len());
        MergeViewBuilder {
            nodes: nodes.iter().peekable(),
            // Capacity presumably sized for an opening + closing directive per
            // node plus the preamble pair — TODO confirm; `unsafe_get_and_insert`
            // holds views into this Vec, so avoiding reallocation matters.
            extra_lines: Vec::with_capacity((nodes.len() * 2) + 2),
            source_mapper,
            last_offset_set: HashMap::new(),
            parent_child_edge_iterator: HashMap::new(),
            gpu_vendor,
            document_glsl_version,
        }
    }

    /// Consumes the import tree and produces the single merged GLSL string.
    pub fn build(&mut self) -> LFString {
        // list of source code views onto the below sources
        let mut merge_list: LinkedList<&'a str> = LinkedList::new();
        // invariant: nodes_iter always has _at least_ one element. Can't save a not-file :B
        let first = self.nodes.next().unwrap().child;
        let first_path = &first.path;
        let first_source = &first.source;
        // seed source_mapper with top-level file
        self.source_mapper.get_num(&first.path);
        // add the optifine preamble (and extra compatibility mangling eventually)
        let version_line_offset = self.find_version_offset(first_source);
        let (version_char_for_line, version_char_following_line) = self.char_offset_for_line(version_line_offset, first_source);
        // NOTE(review): debug print left over from WIP — consider removing.
        eprintln!(
            "line {} char for line {} char after line {}",
            version_line_offset, version_char_for_line, version_char_following_line
        );
        self.add_preamble(
            version_line_offset,
            version_char_following_line,
            first_path,
            first_source,
            &mut merge_list,
        );
        self.set_last_offset_for_tuple(None, first_path, version_char_following_line);
        // self.set_last_offset_for_tuple(None, first, 0);
        // stack to keep track of the depth first traversal
        let mut stack: VecDeque<_> = VecDeque::<&'a NormalizedPathBuf>::new();
        // where the magic happens!
        self.create_merge_views(&mut merge_list, &mut stack);
        // now we add a view of the remainder of the root file
        let offset = self.get_last_offset_for_tuple(None, first_path).unwrap();
        let len = first_source.len();
        merge_list.push_back(&first_source[min(offset, len)..]);
        // Now merge all the views into one singular String to return
        let total_len = merge_list.iter().fold(0, |a, b| a + b.len());
        let mut merged = String::with_capacity(total_len);
        merged.extend(merge_list);
        LFString::from_unchecked(merged)
    }

    /// Recursively walks the remaining (child, parent) pairs, appending source
    /// views and #line directives to `merge_list`. `stack` holds the paths of
    /// ancestors above the current parent so returns can bubble up correctly.
    fn create_merge_views(&mut self, merge_list: &mut LinkedList<&'a str>, stack: &mut VecDeque<&'a NormalizedPathBuf>) {
        loop {
            let n = match self.nodes.next() {
                Some(n) => n,
                None => return,
            };
            // invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
            let (parent, child) = (n.parent.unwrap(), n.child);
            let parent_path = &parent.path;
            let child_path = &child.path;
            // gets the next include position for the filial tuple, seeding if this is the first time querying this tuple
            let edge = self
                .parent_child_edge_iterator
                .entry(FilialTuple {
                    child: &n.child.path,
                    parent: n.parent.map(|p| &p.path),
                })
                .or_insert_with(|| {
                    // let child_positions = self.graph.get_edges_between(parent, child);
                    Box::new(parent.includes_of_path(child_path).unwrap())
                })
                .next()
                .unwrap();
            let parent_source = &parent.source;
            let (char_for_line, char_following_line) = self.char_offset_for_line(edge, parent_source);
            // Resume from wherever this parent was last left off (0 on first visit).
            let offset = *self
                .set_last_offset_for_tuple(stack.back().copied(), parent_path, char_following_line)
                .get_or_insert(0);
            debug!("creating view to start child file";
                "parent" => parent_path, "child" => child_path,
                "grandparent" => stack.back(),
                "last_parent_offset" => offset, "line" => edge, "char_for_line" => char_for_line,
                "char_following_line" => char_following_line,
            );
            // Parent text up to the include site, then switch into the child.
            merge_list.push_back(&parent_source[offset..char_for_line]);
            self.add_opening_line_directive(child_path, merge_list);
            match self.nodes.peek() {
                Some(next) => {
                    let next = *next;
                    // if the next pair's parent is not a child of the current pair, we dump the rest of this childs source
                    if &next.parent.unwrap().path != child_path {
                        let child_source = &child.source;
                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                        let double_newline_offset = match child_source.ends_with('\n') {
                            true => child_source.len() - 1,
                            false => child_source.len(),
                        };
                        merge_list.push_back(&child_source[..double_newline_offset]);
                        self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
                        // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
                        // turns out #line _is_ 0 indexed too? Im really confused
                        self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
                        // if the next pair's parent is not the current pair's parent, we need to bubble up
                        if stack.contains(&&next.parent.unwrap().path) {
                            return;
                        }
                        continue;
                    }
                    // Next pair descends into this child: recurse with the
                    // current parent pushed as an ancestor.
                    stack.push_back(&parent.path);
                    self.create_merge_views(merge_list, stack);
                    stack.pop_back();
                    let offset = self.get_last_offset_for_tuple(Some(parent_path), child_path).unwrap();
                    let child_source = &child.source;
                    // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
                    let end_offset = match child_source.ends_with('\n') {
                        true => 1,
                        false => 0,
                    };
                    if offset < child_source.len() - end_offset {
                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                        merge_list.push_back(&child_source[offset..child_source.len() - end_offset]);
                        self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
                    }
                    // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
                    // turns out #line _is_ 0 indexed too? Im really confused
                    self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
                    // we need to check the next item at the point of original return further down the callstack
                    if self.nodes.peek().is_some() && stack.contains(&&self.nodes.peek().unwrap().parent.unwrap().path) {
                        return;
                    }
                }
                None => {
                    // let child_source = self.sources.get(child_path).unwrap();
                    let child_source = &child.source;
                    // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                    let double_newline_offset = match child_source.ends_with('\n') {
                        true => child_source.len() - 1,
                        false => child_source.len(),
                    };
                    merge_list.push_back(&child_source[..double_newline_offset]);
                    self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
                    // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
                    // turns out #line _is_ 0 indexed too? Im really confused
                    self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
                }
            }
        }
    }

    /// Records how far into `child` (under `parent`) we have merged; returns
    /// the previous offset for that tuple, if any.
    fn set_last_offset_for_tuple(
        &mut self, parent: Option<&'a NormalizedPathBuf>, child: &'a NormalizedPathBuf, offset: usize,
    ) -> Option<usize> {
        debug!("inserting last offset";
            "parent" => parent,
            "child" => &child,
            "offset" => offset);
        self.last_offset_set.insert(FilialTuple { child, parent }, offset)
    }

    #[inline]
    fn get_last_offset_for_tuple(&self, parent: Option<&'a NormalizedPathBuf>, child: &'a NormalizedPathBuf) -> Option<usize> {
        self.last_offset_set.get(&FilialTuple { child, parent }).copied()
    }

    // returns the character offset + 1 of the end of line number `line` and the character
    // offset + 1 for the end of the line after the previous one
    fn char_offset_for_line(&self, line_num: impl Into<usize> + Copy, source: &str) -> (usize, usize) {
        let mut char_for_line: usize = 0;
        let mut char_following_line: usize = 0;
        for (n, line) in source.lines().enumerate() {
            // +1 accounts for the newline stripped by `lines()`.
            if n == line_num.into() {
                char_following_line += line.len() + 1;
                break;
            }
            char_for_line += line.len() + 1;
            char_following_line = char_for_line;
        }
        (char_for_line, char_following_line)
    }

    /// Line index of the `#version` directive, or 0 when none is found.
    #[inline]
    fn find_version_offset(&self, source: &str) -> usize {
        source
            .lines()
            .enumerate()
            .find(|(_, line)| line.starts_with("#version "))
            .map_or(0, |(i, _)| i)
    }

    /// NVIDIA drivers treat #line as 1-indexed for GLSL <= 150, so shift by one there.
    #[inline]
    fn get_line_directive_offset(&self) -> usize {
        match (self.gpu_vendor, self.document_glsl_version) {
            (opengl::GPUVendor::NVIDIA, Version::Glsl110)
            | (opengl::GPUVendor::NVIDIA, Version::Glsl120)
            | (opengl::GPUVendor::NVIDIA, Version::Glsl130)
            | (opengl::GPUVendor::NVIDIA, Version::Glsl140)
            | (opengl::GPUVendor::NVIDIA, Version::Glsl150) => 1,
            _ => 0,
        }
    }

    /// Emits the root source up to (and including) its #version line, the
    /// OptiFine preamble defines, and a #line directive back into the root.
    fn add_preamble(
        &mut self, version_line_offset: impl Into<usize>, version_char_offset: usize, path: &NormalizedPathBuf, source: &'a str,
        merge_list: &mut LinkedList<&'a str>,
    ) {
        merge_list.push_back(&source[..version_char_offset]);
        self.extra_lines.push(consts::OPTIFINE_PREAMBLE.into());
        self.unsafe_get_and_insert(merge_list);
        self.add_closing_line_directive(version_line_offset.into() + self.get_line_directive_offset(), path, merge_list);
    }

    /// Emits `#line 0 <id>` marking the start of an included child file.
    fn add_opening_line_directive(&mut self, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {
        let line_directive = format!("#line 0 {} // {}\n", self.source_mapper.get_num(path), path);
        self.extra_lines.push(line_directive);
        self.unsafe_get_and_insert(merge_list);
    }

    /// Emits a `#line <line> <id>` directive returning to the parent file.
    fn add_closing_line_directive(&mut self, line: impl Into<usize>, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {
        // Optifine doesn't seem to add a leading newline if the previous line was a #line directive
        let line_directive = if let Some(l) = merge_list.back() {
            if l.trim().starts_with("#line") {
                format!("#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
            } else {
                format!("\n#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
            }
        } else {
            format!("\n#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
        };
        self.extra_lines.push(line_directive);
        self.unsafe_get_and_insert(merge_list);
    }

    /// Pushes a view of the most recently added `extra_lines` entry into
    /// `merge_list`, laundering the lifetime with a raw pointer.
    /// NOTE(review): this sidesteps the borrow checker; it appears sound only
    /// because a String's heap buffer does not move when the Vec grows and the
    /// Strings are never mutated or dropped while the views are alive — worth
    /// revisiting for a safe formulation (e.g. an arena or pre-collected lines).
    fn unsafe_get_and_insert(&self, merge_list: &mut LinkedList<&str>) {
        // :^)
        unsafe {
            let vec_ptr_offset = self.extra_lines.as_ptr().add(self.extra_lines.len() - 1);
            merge_list.push_back(&vec_ptr_offset.as_ref().unwrap()[..]);
        }
    }
}
#[cfg(test)]
mod test {
use std::{
fs,
path::{Path, PathBuf},
};
use anyhow::Result;
use filesystem::{LFString, NormalizedPathBuf};
use fs_extra::{copy_items, dir};
use opengl::GPUVendor;
use pretty_assertions::assert_str_eq;
use sourcefile::{SourceMapper, Version};
use tempdir::TempDir;
use workspace_tree::{TreeError, WorkspaceTree};
use crate::MergeViewBuilder;
fn copy_to_tmp_dir(test_path: &str) -> (TempDir, NormalizedPathBuf) {
let tmp_dir = TempDir::new("mcshader").unwrap();
fs::create_dir(tmp_dir.path().join("shaders")).unwrap();
{
let test_path = Path::new(test_path)
.canonicalize()
.unwrap_or_else(|_| panic!("canonicalizing '{}'", test_path));
let opts = &dir::CopyOptions::new();
let files = fs::read_dir(&test_path)
.unwrap()
.map(|e| String::from(e.unwrap().path().to_str().unwrap()))
.collect::<Vec<String>>();
copy_items(&files, &tmp_dir.path().join("shaders"), opts).unwrap();
}
let tmp_path = tmp_dir.path().to_str().unwrap().into();
(tmp_dir, tmp_path)
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_generate_merge_list_01() {
let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/01");
let mut workspace = WorkspaceTree::new(&tmp_path.clone());
workspace.build();
let final_path = tmp_path.join("shaders").join("final.fsh");
let common_path = tmp_path.join("shaders").join("common.glsl");
let mut trees_vec = workspace
.trees_for_entry(&final_path)
.expect("expected successful tree initializing")
.collect::<Result<Vec<_>, TreeError>>()
.expect("expected successful tree-building");
let mut trees = trees_vec.iter_mut();
let tree = trees.next().unwrap();
assert!(trees.next().is_none());
let tree = tree
.collect::<Result<Vec<_>, TreeError>>()
.expect("expected successful tree-building");
let mut source_mapper = SourceMapper::new(2);
let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
let mut truth = LFString::read(merge_file).await.unwrap();
truth = LFString::from_unchecked(truth.replacen("!!", &final_path.to_string(), 1));
truth = LFString::from_unchecked(truth.replacen("!!", &common_path.to_string(), 1));
truth = LFString::from_unchecked(truth.replace("!!", &final_path.to_string()));
assert_str_eq!(*truth, *result);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_generate_merge_list_02() {
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/02");
    // `WorkspaceTree::new` only borrows the path — no clone needed.
    let mut workspace = WorkspaceTree::new(&tmp_path);
    workspace.build();
    let final_path = tmp_path.join("shaders").join("final.fsh");
    // Exactly one tree should be rooted at final.fsh.
    let mut trees_vec = workspace
        .trees_for_entry(&final_path)
        .expect("expected successful tree initializing")
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut trees = trees_vec.iter_mut();
    let tree = trees.next().unwrap();
    assert!(trees.next().is_none());
    let tree = tree
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut source_mapper = SourceMapper::new(2);
    let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
    // The .merge file is the expected output with "!!" placeholders that are
    // substituted, in traversal order, with the absolute source paths.
    let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
    let mut truth = LFString::read(merge_file).await.unwrap();
    truth = LFString::from_unchecked(truth.replacen("!!", &final_path.to_string(), 1));
    // Hoist the loop-invariant utils directory instead of cloning tmp_path per iteration.
    let utils = tmp_path.join("shaders").join("utils");
    for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
        truth = LFString::from_unchecked(truth.replacen("!!", &utils.join(file).to_string(), 1));
    }
    // Reuse `final_path` rather than rebuilding the same path a third time.
    truth = LFString::from_unchecked(truth.replacen("!!", &final_path.to_string(), 1));
    assert_str_eq!(*truth, *result);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_generate_merge_list_03() {
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/03");
    // `WorkspaceTree::new` only borrows the path — no clone needed.
    let mut workspace = WorkspaceTree::new(&tmp_path);
    workspace.build();
    let final_path = tmp_path.join("shaders").join("final.fsh");
    // Exactly one tree should be rooted at final.fsh.
    let mut trees_vec = workspace
        .trees_for_entry(&final_path)
        .expect("expected successful tree initializing")
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut trees = trees_vec.iter_mut();
    let tree = trees.next().unwrap();
    assert!(trees.next().is_none());
    let tree = tree
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut source_mapper = SourceMapper::new(2);
    let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
    // The .merge file is the expected output with "!!" placeholders that are
    // substituted, in traversal order, with the absolute source paths.
    let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
    let mut truth = LFString::read(merge_file).await.unwrap();
    truth = LFString::from_unchecked(truth.replacen("!!", &final_path.to_string(), 1));
    // Hoist the loop-invariant utils directory instead of cloning tmp_path per iteration.
    let utils = tmp_path.join("shaders").join("utils");
    for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
        truth = LFString::from_unchecked(truth.replacen("!!", &utils.join(file).to_string(), 1));
    }
    // Reuse `final_path` rather than rebuilding the same path a third time.
    truth = LFString::from_unchecked(truth.replacen("!!", &final_path.to_string(), 1));
    assert_str_eq!(*truth, *result);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_generate_merge_list_04() {
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/04");
    // `WorkspaceTree::new` only borrows the path — no clone needed.
    let mut workspace = WorkspaceTree::new(&tmp_path);
    workspace.build();
    let final_path = tmp_path.join("shaders").join("final.fsh");
    // Exactly one tree should be rooted at final.fsh.
    let mut trees_vec = workspace
        .trees_for_entry(&final_path)
        .expect("expected successful tree initializing")
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut trees = trees_vec.iter_mut();
    let tree = trees.next().unwrap();
    assert!(trees.next().is_none());
    let tree = tree
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut source_mapper = SourceMapper::new(2);
    let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
    let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
    let mut truth = LFString::read(merge_file).await.unwrap();
    // Build each expected path directly instead of the lossy
    // `PathBuf::new().join(..).to_str().unwrap()` round-trip followed by a re-join.
    // Substitution order mirrors the DFS over the include graph.
    let shaders = tmp_path.join("shaders");
    for file in [
        shaders.join("final.fsh"),
        shaders.join("utils").join("utilities.glsl"),
        shaders.join("utils").join("stuff1.glsl"),
        shaders.join("utils").join("utilities.glsl"),
        shaders.join("utils").join("stuff2.glsl"),
        shaders.join("utils").join("utilities.glsl"),
        shaders.join("final.fsh"),
        shaders.join("lib").join("matrices.glsl"),
        shaders.join("final.fsh"),
    ] {
        truth = LFString::from_unchecked(truth.replacen("!!", &file.to_string(), 1));
    }
    assert_str_eq!(*truth, *result);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_generate_merge_list_06() {
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/06");
    // `WorkspaceTree::new` only borrows the path — no clone needed.
    let mut workspace = WorkspaceTree::new(&tmp_path);
    workspace.build();
    let final_path = tmp_path.join("shaders").join("final.fsh");
    // Exactly one tree should be rooted at final.fsh.
    let mut trees_vec = workspace
        .trees_for_entry(&final_path)
        .expect("expected successful tree initializing")
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut trees = trees_vec.iter_mut();
    let tree = trees.next().unwrap();
    assert!(trees.next().is_none());
    let tree = tree
        .collect::<Result<Vec<_>, TreeError>>()
        .expect("expected successful tree-building");
    let mut source_mapper = SourceMapper::new(2);
    let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
    let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
    let mut truth = LFString::read(merge_file).await.unwrap();
    // Build each expected path directly instead of the lossy
    // `PathBuf::new().join(..).to_str().unwrap()` round-trip followed by a re-join.
    // final.fsh and test.glsl alternate because test.glsl is included twice.
    let shaders = tmp_path.join("shaders");
    for file in [
        shaders.join("final.fsh"),
        shaders.join("test.glsl"),
        shaders.join("final.fsh"),
        shaders.join("test.glsl"),
        shaders.join("final.fsh"),
    ] {
        truth = LFString::from_unchecked(truth.replacen("!!", &file.to_string(), 1));
    }
    assert_str_eq!(*truth, *result);
}
}

View file

@ -4,10 +4,19 @@ version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
slog-term = "2.9"
slog-scope = "4.4"
slog-atomic = "3.1"
slog-async = "2.7.0"
slog-stdlog = "4.1.1"
slog-scope-futures = "0.1.1"
log = "0.4.16"
rand = "0.8"
lazy_static = "1.4"
tokio = { version = "1.18.0", features = ["full"] }
logging_macro = { path = "../logging_macro" }

View file

@ -1,6 +1,4 @@
use rand::{rngs, Rng};
use slog::slog_o;
use slog_scope::GlobalLoggerGuard;
use slog_term::{FullFormat, PlainSyncDecorator};
use std::{cell::RefCell, sync::Arc};
@ -10,16 +8,18 @@ use lazy_static::lazy_static;
use slog::*;
use slog_atomic::*;
fn new_trace_id() -> String {
pub use logging_macro::*;
pub use slog_scope::{scope, logger, error, warn, info, trace, debug, GlobalLoggerGuard};
pub use slog::{slog_o, FnValue, Level, Value, Record, Key, Serializer, Result};
pub use slog_scope_futures::FutureExt;
pub fn new_trace_id() -> String {
let rng = CURRENT_RNG.with(|rng| rng.borrow_mut().gen::<[u8; 4]>());
return format!("{:04x}", u32::from_be_bytes(rng));
}
pub fn slog_with_trace_id<F: FnOnce()>(f: F) {
slog_scope::scope(&slog_scope::logger().new(slog_o!("trace" => new_trace_id())), f)
}
pub fn set_logger_with_level(level: Level) -> GlobalLoggerGuard {
pub fn set_level(level: Level) -> GlobalLoggerGuard {
slog_stdlog::init_with_level(log::Level::Trace).err().or(None);
let drain = Arc::new(logger_base(level).fuse());
DRAIN_SWITCH.ctrl().set(drain.clone());
slog_scope::set_global_logger(Logger::root(drain, o!()))

View file

@ -6,6 +6,7 @@ edition = "2021"
[lib]
proc-macro = true
doctest = false
[dependencies]
quote = "1.0"

View file

@ -3,22 +3,36 @@ use quote::quote;
use syn::{parse_macro_input, parse_quote, ItemFn};
#[proc_macro_attribute]
pub fn log_scope(_args: TokenStream, function: TokenStream) -> TokenStream {
pub fn scope(_args: TokenStream, function: TokenStream) -> TokenStream {
let mut function = parse_macro_input!(function as ItemFn);
let function_name = function.sig.ident.to_string();
let stmts = function.block.stmts;
function.block = Box::new(parse_quote!({
use slog::{slog_o, FnValue, Level};
use logging::{slog_o, FnValue, Level, scope, logger};
use std::thread::current;
let _guard = logging::set_logger_with_level(Level::Trace);
slog_scope::scope(&slog_scope::logger().new(slog_o!("test_name" => #function_name, "thread_num" => FnValue(|_| format!("{:?}", current().id())))), || {
let _guard = logging::set_level(Level::Trace);
scope(&logger().new(slog_o!("test_name" => #function_name, "thread_num" => FnValue(|_| format!("{:?}", current().id())))), || {
#(#stmts)*
});
}));
TokenStream::from(quote!(#function))
}
#[proc_macro_attribute]
pub fn with_trace_id(_args: TokenStream, function: TokenStream) -> TokenStream {
let mut function = parse_macro_input!(function as ItemFn);
let stmts = function.block.stmts;
function.block = Box::new(parse_quote!({
use logging::{slog_o, scope, logger, new_trace_id};
scope(&logger().new(slog_o!("trace" => new_trace_id())), || {
#(#stmts)*
})
}));
TokenStream::from(quote!(#function))
}

View file

@ -5,31 +5,42 @@ authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[dependencies]
rust_lsp = { git = "https://github.com/Strum355/RustLSP", branch = "master" }
serde_json = "1.0"
serde = "1.0"
walkdir = "2.3"
petgraph = "0.6"
graph = { path = "../graph" }
lazy_static = "1.4"
regex = "1.4"
url = "2.2"
percent-encoding = "2.1"
anyhow = "1.0"
thiserror = "1.0"
glutin = "0.28"
gl = "0.14"
mockall = "0.11"
path-slash = "0.1"
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
slog-scope = "4.4"
once_cell = "1.7"
tree-sitter = "0.20.6"
tree-sitter-glsl = "0.1.2"
glob = "0.3"
filesystem = { path = "../filesystem" }
# glutin = "0.28"
gl = "0.14"
anyhow = "1.0"
thiserror = "1.0"
tree-sitter = "0.20"
tree-sitter-glsl = "0.1"
logging = { path = "../logging" }
logging_macro = { path = "../logging_macro" }
server = { path = "../server" }
tower-lsp = "0.17"
tokio = { version = "1.18", features = ["full"] }
futures = "0.3"
workspace = { path = "../workspace" }
opengl = { path = "../opengl" }
sourcefile = { path = "../sourcefile" }
[dev-dependencies]
tempdir = "0.3"
fs_extra = "1.2"
hamcrest2 = "*"
pretty_assertions = "1.1"
tower-test = "0.4"

View file

@ -1,52 +0,0 @@
use std::cell::RefCell;
use std::fs::OpenOptions;
use std::io::prelude::*;
use std::path::Path;
use std::rc::Rc;
use petgraph::dot::Config;
use serde_json::Value;
use petgraph::dot;
use anyhow::{format_err, Result};
use slog_scope::info;
use crate::graph::CachedStableGraph;
use super::Invokeable;
/// Custom LSP command that renders the include graph to `graph.dot` in GraphViz DOT format.
pub struct GraphDotCommand {
// Shared handle to the workspace's include graph.
pub graph: Rc<RefCell<CachedStableGraph>>,
}
impl Invokeable for GraphDotCommand {
/// Writes the DOT rendering of the include graph to `<root>/graph.dot`,
/// truncating any existing file. Returns `Value::Null` on success.
fn run_command(&self, root: &Path, _: &[Value]) -> Result<Value> {
let filepath = root.join("graph.dot");
info!("generating dot file"; "path" => filepath.as_os_str().to_str());
let mut file = OpenOptions::new().truncate(true).write(true).create(true).open(filepath).unwrap();
// Closure so every fallible write can use `?`; mapped into a single error below.
let mut write_data_closure = || -> Result<(), std::io::Error> {
let graph = self.graph.as_ref();
file.seek(std::io::SeekFrom::Start(0))?;
// Emit our own digraph header so graph-wide styling can be set; petgraph
// then renders only the graph content (Config::GraphContentOnly).
file.write_all("digraph {\n\tgraph [splines=ortho]\n\tnode [shape=box]\n".as_bytes())?;
file.write_all(
dot::Dot::with_config(&graph.borrow().graph, &[Config::GraphContentOnly])
.to_string()
.as_bytes(),
)?;
file.write_all("\n}".as_bytes())?;
file.flush()?;
file.seek(std::io::SeekFrom::Start(0))?;
Ok(())
};
match write_data_closure() {
Err(err) => Err(format_err!("error generating graphviz data: {}", err)),
_ => Ok(Value::Null),
}
}
}

View file

@ -1,114 +0,0 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use serde_json::Value;
use petgraph::graph::NodeIndex;
use anyhow::{format_err, Result};
use std::fs;
use crate::dfs;
use crate::merge_views::FilialTuple;
use crate::source_mapper::SourceMapper;
use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson};
use super::Invokeable;
/// Custom LSP command that builds the fully-merged view of a top-level shader
/// file — the single concatenated source produced from its include tree.
pub struct VirtualMergedDocument {
// Shared handle to the workspace's include graph.
pub graph: Rc<RefCell<CachedStableGraph>>,
}
impl VirtualMergedDocument {
// TODO: DUPLICATE CODE
/// Returns the root ancestors (files with no parents) that transitively include
/// `uri`; `Ok(None)` when `uri` itself has no ancestors, `Err` when `uri` is not
/// in the graph at all.
fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
let curr_node = match self.graph.borrow_mut().find_node(uri) {
Some(n) => n,
None => return Err(format_err!("node not found {:?}", uri)),
};
let roots = self.graph.borrow().collect_root_ancestors(curr_node);
if roots.is_empty() {
return Ok(None);
}
Ok(Some(roots))
}
/// Collects the duplicate-preserving DFS traversal from `root`,
/// failing with a `CycleError` if an include cycle is encountered.
pub fn get_dfs_for_node(&self, root: NodeIndex) -> Result<Vec<FilialTuple>, dfs::error::CycleError> {
let graph_ref = self.graph.borrow();
let dfs = dfs::Dfs::new(&graph_ref, root);
dfs.collect::<Result<Vec<_>, _>>()
}
/// Reads each distinct file referenced by `nodes` from disk, normalising CRLF to LF.
pub fn load_sources(&self, nodes: &[FilialTuple]) -> Result<HashMap<PathBuf, String>> {
let mut sources = HashMap::new();
for node in nodes {
let graph = self.graph.borrow();
let path = graph.get_node(node.child);
// A file can appear several times in the traversal; read it only once.
if sources.contains_key(&path) {
continue;
}
let source = match fs::read_to_string(&path) {
Ok(s) => s,
Err(e) => return Err(format_err!("error reading {:?}: {}", path, e)),
};
let source = source.replace("\r\n", "\n");
sources.insert(path.clone(), source);
}
Ok(sources)
}
}
impl Invokeable for VirtualMergedDocument {
/// Expects the first argument to be the path of a top-level (ancestor-less) file;
/// returns the merged source as a JSON string, errors otherwise.
fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value> {
let path = PathBuf::from_json(arguments.get(0).unwrap())?;
let file_ancestors = match self.get_file_toplevel_ancestors(&path) {
Ok(opt) => match opt {
Some(ancestors) => ancestors,
None => vec![],
},
Err(e) => return Err(e),
};
//info!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| self.graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::<Vec<String>>());
// the set of all filepath->content. TODO: change to Url?
let mut all_sources: HashMap<PathBuf, String> = HashMap::new();
// if we are a top-level file (this has to be one of the set defined by Optifine, right?)
if file_ancestors.is_empty() {
// gather the list of all descendants
let root = self.graph.borrow_mut().find_node(&path).unwrap();
let tree = match self.get_dfs_for_node(root) {
Ok(tree) => tree,
Err(e) => return Err(e.into()),
};
let sources = match self.load_sources(&tree) {
Ok(s) => s,
Err(e) => return Err(e),
};
all_sources.extend(sources);
let mut source_mapper = SourceMapper::new(all_sources.len());
let graph = self.graph.borrow();
let view = merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build();
return Ok(serde_json::value::Value::String(view));
}
return Err(format_err!(
"{:?} is not a top-level file aka has ancestors",
path.strip_prefix(root).unwrap()
));
}
}

View file

@ -1,36 +0,0 @@
use std::{collections::HashMap, path::Path};
use serde_json::Value;
use anyhow::{format_err, Result};
use slog_scope::info;
pub mod graph_dot;
pub mod merged_includes;
pub mod parse_tree;
/// Registry of named custom LSP commands, dispatching execution requests by name.
pub struct CustomCommandProvider {
commands: HashMap<String, Box<dyn Invokeable>>,
}
impl CustomCommandProvider {
/// Builds the registry from `(name, implementation)` pairs.
pub fn new(commands: Vec<(&str, Box<dyn Invokeable>)>) -> CustomCommandProvider {
CustomCommandProvider {
commands: commands.into_iter().map(|tup| (tup.0.into(), tup.1)).collect(),
}
}
/// Runs the command registered under `command` with `args`, rooted at
/// `root_path`; errors when no such command is registered.
pub fn execute(&self, command: &str, args: &[Value], root_path: &Path) -> Result<Value> {
if self.commands.contains_key(command) {
info!("running command";
"command" => command,
"args" => format!("[{}]", args.iter().map(|v| serde_json::to_string(v).unwrap()).collect::<Vec<String>>().join(", ")));
return self.commands.get(command).unwrap().run_command(root_path, args);
}
Err(format_err!("command doesn't exist"))
}
}
/// A single invokable custom command; `root` is the workspace root directory.
pub trait Invokeable {
fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value>;
}

View file

@ -1,8 +1,6 @@
use std::str::FromStr;
use slog::Level;
use slog_scope::error;
use logging::{Level, error};
pub fn handle_log_level_change<F: FnOnce(Level)>(log_level: String, callback: F) {
match Level::from_str(log_level.as_str()) {

View file

@ -1,4 +0,0 @@
/// Source identifier attached to diagnostics emitted by this server (see `Diagnostic.source`).
pub static SOURCE: &str = "mc-glsl";
/// GLSL extension directive enabling `#include`; not currently referenced (hence `dead_code`).
#[allow(dead_code)]
pub static INCLUDE_DIRECTIVE: &str = "#extension GL_GOOGLE_include_directive : require\n";

View file

@ -1,335 +0,0 @@
use petgraph::stable_graph::NodeIndex;
use crate::{graph::CachedStableGraph, merge_views::FilialTuple};
use anyhow::Result;
/// Bookkeeping for one ancestor on the current DFS path: how many of `node`'s
/// children have been visited (`touch`) out of `children` total.
struct VisitCount {
node: NodeIndex,
touch: usize,
children: usize,
}
/// Performs a depth-first search with duplicates
/// (a node included N times is yielded N times, once per inclusion site).
pub struct Dfs<'a> {
// Nodes yet to be visited; children are pushed reversed so pop() follows include order.
stack: Vec<NodeIndex>,
graph: &'a CachedStableGraph,
// The current ancestry path — used both for the parent pointer and cycle detection.
cycle: Vec<VisitCount>,
}
impl<'a> Dfs<'a> {
pub fn new(graph: &'a CachedStableGraph, start: NodeIndex) -> Self {
Dfs {
stack: vec![start],
graph,
cycle: Vec::new(),
}
}
/// After reaching a leaf, pops fully-visited ancestors off the path,
/// stopping at the first ancestor that still has unvisited children.
fn reset_path_to_branch(&mut self) {
while let Some(par) = self.cycle.last_mut() {
par.touch += 1;
if par.touch > par.children {
self.cycle.pop();
} else {
break;
}
}
}
/// Errors if any prospective child already appears on the current ancestry path.
fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), error::CycleError> {
for prev in &self.cycle {
for child in children {
if prev.node == *child {
let cycle_nodes: Vec<NodeIndex> = self.cycle.iter().map(|n| n.node).collect();
return Err(error::CycleError::new(&cycle_nodes, *child, self.graph));
}
}
}
Ok(())
}
}
impl<'a> Iterator for Dfs<'a> {
type Item = Result<FilialTuple, error::CycleError>;
/// Yields `(child, parent)` tuples in pre-order, or a `CycleError` as soon
/// as an include cycle is detected.
fn next(&mut self) -> Option<Result<FilialTuple, error::CycleError>> {
let parent = self.cycle.last().map(|p| p.node);
if let Some(child) = self.stack.pop() {
self.cycle.push(VisitCount {
node: child,
children: self.graph.graph.edges(child).count(),
touch: 1,
});
let mut children: Vec<_> = self
.graph
.get_all_child_positions(child)
.collect();
// Reversed so that pop() returns children in original include order.
children.reverse();
if !children.is_empty() {
let child_indexes: Vec<_> = children.iter().map(|c| c.0).collect();
match self.check_for_cycle(&child_indexes) {
Ok(_) => {}
Err(e) => return Some(Err(e)),
};
for child in children {
self.stack.push(child.0);
}
} else {
// Leaf node: unwind the ancestry path back to the next branch point.
self.reset_path_to_branch();
}
return Some(Ok(FilialTuple { child, parent }));
}
None
}
}
/// Error type for include-cycle detection during DFS traversal.
pub mod error {
use petgraph::stable_graph::NodeIndex;
use std::{
error::Error as StdError,
fmt::{Debug, Display},
path::PathBuf,
};
use crate::{consts, graph::CachedStableGraph};
use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
/// An include cycle, stored as the chain of file paths forming the loop.
#[derive(Debug)]
pub struct CycleError(Vec<PathBuf>);
impl StdError for CycleError {}
impl CycleError {
/// Resolves the offending node indices (plus the repeated node) to file paths.
pub fn new(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph) -> Self {
let mut resolved_nodes: Vec<PathBuf> = nodes.iter().map(|i| graph.get_node(*i)).collect();
resolved_nodes.push(graph.get_node(current_node));
CycleError(resolved_nodes)
}
}
impl Display for CycleError {
// Renders the chain as: A imports B, which imports C, ...
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut disp = String::new();
disp.push_str(format!("Include cycle detected:\n{:?} imports ", self.0[0]).as_str());
for p in &self.0[1..self.0.len() - 1] {
disp.push_str(format!("\n{:?}, which imports ", *p).as_str());
}
disp.push_str(format!("\n{:?}", self.0[self.0.len() - 1]).as_str());
f.write_str(disp.as_str())
}
}
impl From<CycleError> for Diagnostic {
// Maps the cycle onto an ERROR diagnostic anchored to the start of the file.
fn from(e: CycleError) -> Diagnostic {
Diagnostic {
severity: Some(DiagnosticSeverity::ERROR),
range: Range::new(Position::new(0, 0), Position::new(0, 500)),
source: Some(consts::SOURCE.into()),
message: e.into(),
code: None,
tags: None,
related_information: None,
code_description: Option::None,
data: Option::None,
}
}
}
impl From<CycleError> for String {
fn from(e: CycleError) -> String {
format!("{}", e)
}
}
}
#[cfg(test)]
// Tests for the duplicate-preserving DFS: traversal order, parent tracking,
// and cycle detection.
mod dfs_test {
use std::path::PathBuf;
use hamcrest2::prelude::*;
use hamcrest2::{assert_that, ok};
use petgraph::{algo::is_cyclic_directed, graph::NodeIndex};
use crate::graph::CachedStableGraph;
use crate::{dfs, IncludePosition};
#[test]
#[logging_macro::log_scope]
// Acyclic graphs: nodes come out in pre-order, duplicates preserved, parents tracked.
fn test_graph_dfs() {
{
let mut graph = CachedStableGraph::new();
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
let dfs = dfs::Dfs::new(&graph, idx0);
let mut collection = Vec::new();
for i in dfs {
assert_that!(&i, ok());
collection.push(i.unwrap());
}
let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
// 0
// / \
// 1 2
// /
// 3
let expected_nodes = vec![idx0, idx1, idx3, idx2];
assert_eq!(expected_nodes, nodes);
let expected_parents = vec![None, Some(idx0), Some(idx1), Some(idx0)];
assert_eq!(expected_parents, parents);
assert!(!is_cyclic_directed(&graph.graph));
}
{
// Diamond-shaped graph: shared subtrees (4→6→7) must be re-emitted per parent.
let mut graph = CachedStableGraph::new();
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));
let idx4 = graph.add_node(&PathBuf::from("4"));
let idx5 = graph.add_node(&PathBuf::from("5"));
let idx6 = graph.add_node(&PathBuf::from("6"));
let idx7 = graph.add_node(&PathBuf::from("7"));
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 });
graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 });
let dfs = dfs::Dfs::new(&graph, idx0);
let mut collection = Vec::new();
for i in dfs {
assert_that!(&i, ok());
collection.push(i.unwrap());
}
let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
// 0
// / \
// 1 2
// / \ / \
// 3 4 5
// \ /
// 6 - 7
let expected_nodes = vec![idx0, idx1, idx3, idx6, idx7, idx4, idx6, idx7, idx2, idx5, idx4, idx6, idx7];
assert_eq!(expected_nodes, nodes);
let expected_parents = vec![
None,
Some(idx0),
Some(idx1),
Some(idx3),
Some(idx6),
Some(idx1),
Some(idx4),
Some(idx6),
Some(idx0),
Some(idx2),
Some(idx2),
Some(idx4),
Some(idx6),
];
assert_eq!(expected_parents, parents);
assert!(!is_cyclic_directed(&graph.graph));
}
}
#[test]
#[logging_macro::log_scope]
// Cyclic graphs: the iterator must surface a CycleError instead of looping forever.
fn test_graph_dfs_cycle() {
{
let mut graph = CachedStableGraph::new();
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));
let idx4 = graph.add_node(&PathBuf::from("4"));
let idx5 = graph.add_node(&PathBuf::from("5"));
let idx6 = graph.add_node(&PathBuf::from("6"));
let idx7 = graph.add_node(&PathBuf::from("7"));
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 });
graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx7, idx4, IncludePosition { line: 4, start: 0, end: 0 });
let mut dfs = dfs::Dfs::new(&graph, idx0);
// The first five steps stay on the acyclic prefix of the walk.
for _ in 0..5 {
if let Some(i) = dfs.next() {
assert_that!(&i, ok());
}
}
// 0
// / \
// 1 2
// / \ / \
// 3 4 5
// \ / \
// 6 - 7
let next = dfs.next().unwrap();
assert_that!(next, err());
assert!(is_cyclic_directed(&graph.graph));
}
{
// Two-node mutual inclusion.
// NOTE(review): this case only prints the results; consider asserting the error.
let mut graph = CachedStableGraph::new();
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx1, idx0, IncludePosition { line: 2, start: 0, end: 0 });
let mut dfs = dfs::Dfs::new(&graph, idx1);
println!("{:?}", dfs.next());
println!("{:?}", dfs.next());
println!("{:?}", dfs.next());
}
}
}

View file

@ -1,374 +0,0 @@
use petgraph::stable_graph::EdgeIndex;
use petgraph::stable_graph::NodeIndex;
use petgraph::stable_graph::StableDiGraph;
use petgraph::visit::EdgeRef;
use petgraph::Direction;
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
str::FromStr,
};
use super::IncludePosition;
/// Wraps a `StableDiGraph` with caching behaviour for node search by maintaining
/// an index for node value to node index and a reverse index.
/// This allows for **O(1)** lookup for a value if it exists, else **O(n)**.
/// Wraps a `StableDiGraph` with caching behaviour for node search by maintaining
/// an index for node value to node index and a reverse index.
/// This allows for **O(1)** lookup for a value if it exists, else **O(n)**.
pub struct CachedStableGraph {
// StableDiGraph is used as it allows for String node values, essential for
// generating the GraphViz DOT render.
pub graph: StableDiGraph<String, IncludePosition>,
cache: HashMap<PathBuf, NodeIndex>,
// Maps a node index to its abstracted string representation.
// Mainly used as the graph is based on NodeIndex.
reverse_index: HashMap<NodeIndex, PathBuf>,
}
impl CachedStableGraph {
#[allow(clippy::new_without_default)]
pub fn new() -> CachedStableGraph {
CachedStableGraph {
graph: StableDiGraph::new(),
cache: HashMap::new(),
reverse_index: HashMap::new(),
}
}
/// Returns the `NodeIndex` for a given graph node with the value of `name`
/// and caches the result in the `HashMap`. Complexity is **O(1)** if the value
/// is cached (which should always be the case), else **O(n)** where **n** is
/// the number of node indices, as an exhaustive search must be done.
pub fn find_node(&mut self, name: &Path) -> Option<NodeIndex> {
match self.cache.get(name) {
Some(n) => Some(*n),
None => {
// If the string is not in cache, O(n) search the graph (i know...) and then cache the NodeIndex
// for later
let n = self.graph.node_indices().find(|n| self.graph[*n] == name.to_str().unwrap());
if let Some(n) = n {
self.cache.insert(name.into(), n);
}
n
}
}
}
/// Returns the `PathBuf` for a given `NodeIndex`.
pub fn get_node(&self, node: NodeIndex) -> PathBuf {
PathBuf::from_str(&self.graph[node]).unwrap()
}
/// Returns an iterator over all the `IncludePosition`'s between a parent and its child for all the positions
/// that the child may be imported into the parent, in order of import.
pub fn get_child_positions(&self, parent: NodeIndex, child: NodeIndex) -> impl Iterator<Item = IncludePosition> + '_ {
let mut edges = self
.graph
.edges(parent)
.filter_map(move |edge| {
let target = self.graph.edge_endpoints(edge.id()).unwrap().1;
if target != child {
return None;
}
Some(self.graph[edge.id()])
})
.collect::<Vec<IncludePosition>>();
// Sort by line number so positions come back in include order.
edges.sort_by(|x, y| x.line.cmp(&y.line));
edges.into_iter()
}
/// Returns an iterator over all the `(NodeIndex, IncludePosition)` tuples between a node and all its children, in order
/// of import.
pub fn get_all_child_positions(&self, node: NodeIndex) -> impl Iterator<Item = (NodeIndex, IncludePosition)> + '_ {
let mut edges = self.graph.edges(node).map(|edge| {
let child = self.graph.edge_endpoints(edge.id()).unwrap().1;
(child, self.graph[edge.id()])
})
.collect::<Vec<_>>();
edges.sort_by(|x, y| x.1.line.cmp(&y.1.line));
edges.into_iter()
}
/// Adds a node for `name`, returning the existing index if the path is already cached.
pub fn add_node(&mut self, name: &Path) -> NodeIndex {
if let Some(idx) = self.cache.get(name) {
return *idx;
}
let idx = self.graph.add_node(name.to_str().unwrap().to_string());
self.cache.insert(name.to_owned(), idx);
self.reverse_index.insert(idx, name.to_owned());
idx
}
/// Adds a parent→child edge annotated with the include position; duplicates are allowed.
pub fn add_edge(&mut self, parent: NodeIndex, child: NodeIndex, meta: IncludePosition) -> EdgeIndex {
self.graph.add_edge(parent, child, meta)
}
/// Removes the specific parent→child edge whose weight equals `position`, if present.
pub fn remove_edge(&mut self, parent: NodeIndex, child: NodeIndex, position: IncludePosition) {
self.graph
.edges(parent)
.find(|edge| self.graph.edge_endpoints(edge.id()).unwrap().1 == child && *edge.weight() == position)
.map(|edge| edge.id())
.and_then(|edge| self.graph.remove_edge(edge));
}
/// Returns the direct children (outgoing neighbors) of `node`.
pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
self.graph.neighbors(node)
}
/// Returns all parentless ancestors reachable from `node` via incoming edges.
pub fn collect_root_ancestors(&self, node: NodeIndex) -> Vec<NodeIndex> {
let mut visited = HashSet::new();
self.get_root_ancestors(node, node, &mut visited)
}
// TODO: impl Iterator
/// Returns the direct parents (incoming neighbors) of `node`.
fn parent_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
self.graph.neighbors_directed(node, Direction::Incoming).collect()
}
// Recursive walk up the parent edges, collecting parentless nodes; `visited`
// guards against revisiting, and reaching `initial` again yields nothing.
fn get_root_ancestors(&self, initial: NodeIndex, node: NodeIndex, visited: &mut HashSet<NodeIndex>) -> Vec<NodeIndex> {
if node == initial && !visited.is_empty() {
return vec![];
}
let parents = self.parent_node_indexes(node);
let mut collection = Vec::with_capacity(parents.len());
for ancestor in &parents {
visited.insert(*ancestor);
}
for ancestor in &parents {
let ancestors = self.parent_node_indexes(*ancestor);
if !ancestors.is_empty() {
collection.extend(self.get_root_ancestors(initial, *ancestor, visited));
} else {
collection.push(*ancestor);
}
}
collection
}
}
#[cfg(test)]
// Test-only helpers: name-based lookups and node removal, compiled only for tests.
impl CachedStableGraph {
/// Returns the paths of `node`'s direct parents via the reverse index.
fn parent_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
self.graph
.neighbors_directed(node, Direction::Incoming)
.map(|n| self.reverse_index.get(&n).unwrap().clone())
.collect()
}
/// Returns the paths of `node`'s direct children via the reverse index.
fn child_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
self.graph
.neighbors(node)
.map(|n| self.reverse_index.get(&n).unwrap().clone())
.collect()
}
/// Removes the node for `name` from graph and cache.
/// NOTE(review): `reverse_index` is not purged here — stale entry remains; confirm intended.
fn remove_node(&mut self, name: &Path) {
let idx = self.cache.remove(name);
if let Some(idx) = idx {
self.graph.remove_node(idx);
}
}
}
#[cfg(test)]
mod graph_test {
use std::path::PathBuf;
use petgraph::graph::NodeIndex;
use crate::{graph::CachedStableGraph, IncludePosition};
#[test]
#[logging_macro::log_scope]
// Two nodes with one edge: checks child/parent lookups by name and index,
// root-ancestor collection, and node removal.
fn test_graph_two_connected_nodes() {
let mut graph = CachedStableGraph::new();
let idx1 = graph.add_node(&PathBuf::from("sample"));
let idx2 = graph.add_node(&PathBuf::from("banana"));
graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, end: 0 });
let children = graph.child_node_names(idx1);
assert_eq!(children.len(), 1);
assert_eq!(children[0], Into::<PathBuf>::into("banana".to_string()));
let children: Vec<NodeIndex> = graph.child_node_indexes(idx1).collect();
assert_eq!(children.len(), 1);
assert_eq!(children[0], idx2);
let parents = graph.parent_node_names(idx1);
assert_eq!(parents.len(), 0);
let parents = graph.parent_node_names(idx2);
assert_eq!(parents.len(), 1);
assert_eq!(parents[0], Into::<PathBuf>::into("sample".to_string()));
let parents = graph.parent_node_indexes(idx2);
assert_eq!(parents.len(), 1);
assert_eq!(parents[0], idx1);
let ancestors = graph.collect_root_ancestors(idx2);
assert_eq!(ancestors.len(), 1);
assert_eq!(ancestors[0], idx1);
let ancestors = graph.collect_root_ancestors(idx1);
assert_eq!(ancestors.len(), 0);
// Removing "sample" should leave "banana" orphaned and unfindable by the old name.
graph.remove_node(&PathBuf::from("sample"));
assert_eq!(graph.graph.node_count(), 1);
assert!(graph.find_node(&PathBuf::from("sample")).is_none());
let neighbors = graph.child_node_names(idx2);
assert_eq!(neighbors.len(), 0);
}
#[test]
#[logging_macro::log_scope]
// The same child included twice by one parent must keep both edges,
// returned in line-number order.
fn test_double_import() {
let mut graph = CachedStableGraph::new();
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx1, IncludePosition { line: 4, start: 0, end: 0 });
// 0
// / \
// 1 1
assert_eq!(2, graph.get_child_positions(idx0, idx1).count());
let mut edge_metas = graph.get_child_positions(idx0, idx1);
assert_eq!(Some(IncludePosition { line: 2, start: 0, end: 0 }), edge_metas.next());
assert_eq!(Some(IncludePosition { line: 4, start: 0, end: 0 }), edge_metas.next());
}
#[test]
#[logging_macro::log_scope]
fn test_collect_root_ancestors() {
    // Each scope below builds a small include graph (ASCII diagram inline)
    // and checks that collect_root_ancestors returns exactly the in-degree-0
    // ancestors of the queried node, and nothing for roots themselves.
    {
        // Diamond-ish: two roots (0 and 3) both reach 1, which reaches 2.
        let mut graph = CachedStableGraph::new();
        let idx0 = graph.add_node(&PathBuf::from("0"));
        let idx1 = graph.add_node(&PathBuf::from("1"));
        let idx2 = graph.add_node(&PathBuf::from("2"));
        let idx3 = graph.add_node(&PathBuf::from("3"));
        graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
        graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, end: 0 });
        graph.add_edge(idx3, idx1, IncludePosition { line: 4, start: 0, end: 0 });
        // 0 3
        // |/
        // 1
        // |
        // 2
        let roots = graph.collect_root_ancestors(idx2);
        assert_eq!(roots, vec![idx3, idx0]);
        let roots = graph.collect_root_ancestors(idx1);
        assert_eq!(roots, vec![idx3, idx0]);
        let roots = graph.collect_root_ancestors(idx0);
        assert_eq!(roots, vec![]);
        let roots = graph.collect_root_ancestors(idx3);
        assert_eq!(roots, vec![]);
    }
    {
        // Single root 0 fanning out to 1 and 2, with 3 below 1.
        let mut graph = CachedStableGraph::new();
        let idx0 = graph.add_node(&PathBuf::from("0"));
        let idx1 = graph.add_node(&PathBuf::from("1"));
        let idx2 = graph.add_node(&PathBuf::from("2"));
        let idx3 = graph.add_node(&PathBuf::from("3"));
        graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
        graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
        graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
        // 0
        // / \
        // 1 2
        // /
        // 3
        let roots = graph.collect_root_ancestors(idx3);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx2);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx1);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx0);
        assert_eq!(roots, vec![]);
    }
    {
        // Two roots (0 and 2) converging on 3 via different paths.
        let mut graph = CachedStableGraph::new();
        let idx0 = graph.add_node(&PathBuf::from("0"));
        let idx1 = graph.add_node(&PathBuf::from("1"));
        let idx2 = graph.add_node(&PathBuf::from("2"));
        let idx3 = graph.add_node(&PathBuf::from("3"));
        graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
        graph.add_edge(idx2, idx3, IncludePosition { line: 3, start: 0, end: 0 });
        graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
        // 0
        // \
        // 2 1
        // \ /
        // 3
        let roots = graph.collect_root_ancestors(idx3);
        assert_eq!(roots, vec![idx0, idx2]);
        let roots = graph.collect_root_ancestors(idx2);
        assert_eq!(roots, vec![]);
        let roots = graph.collect_root_ancestors(idx1);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx0);
        assert_eq!(roots, vec![]);
    }
    {
        // Simple chain 0 -> 1 with 1 fanning out to 2 and 3.
        let mut graph = CachedStableGraph::new();
        let idx0 = graph.add_node(&PathBuf::from("0"));
        let idx1 = graph.add_node(&PathBuf::from("1"));
        let idx2 = graph.add_node(&PathBuf::from("2"));
        let idx3 = graph.add_node(&PathBuf::from("3"));
        graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
        graph.add_edge(idx1, idx2, IncludePosition { line: 4, start: 0, end: 0 });
        graph.add_edge(idx1, idx3, IncludePosition { line: 6, start: 0, end: 0 });
        // 0
        // |
        // 1
        // / \
        // 2 3
        let roots = graph.collect_root_ancestors(idx3);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx2);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx1);
        assert_eq!(roots, vec![idx0]);
        let roots = graph.collect_root_ancestors(idx0);
        assert_eq!(roots, vec![]);
    }
}
}

View file

@ -1,16 +0,0 @@
use rust_lsp::lsp_types::notification::Notification;
use serde::{Deserialize, Serialize};
/// Marker type for the custom `mc-glsl/status` notification sent to the
/// client to drive its status-bar indicator. Uninhabited: only used as a
/// type-level tag for the `Notification` trait.
pub enum Status {}

impl Notification for Status {
    type Params = StatusParams;
    const METHOD: &'static str = "mc-glsl/status";
}
/// Payload for the `mc-glsl/status` notification.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
pub struct StatusParams {
    // Machine-readable state, e.g. "loading" or "ready".
    pub status: String,
    // Optional human-readable text shown next to the status.
    pub message: Option<String>,
    // Optional VSCode codicon name, e.g. "$(check)" — presumably rendered
    // by the client extension; confirm against the client code.
    pub icon: Option<String>,
}

View file

@ -1,933 +1,20 @@
#![feature(once_cell)]
#![feature(option_get_or_insert_default)]
use logging::{logger, FutureExt};
use server::Server;
use tower_lsp::LspService;
use merge_views::FilialTuple;
use rust_lsp::jsonrpc::{method_types::*, *};
use rust_lsp::lsp::*;
use rust_lsp::lsp_types::{notification::*, *};
use petgraph::stable_graph::NodeIndex;
use serde::Deserialize;
use serde_json::{from_value, Value};
use tree_sitter::Parser;
use url_norm::FromUrl;
use walkdir::WalkDir;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::fmt::{Debug, Display, Formatter};
use std::fs;
use std::io::{stdin, stdout, BufRead, BufReader};
use std::iter::{Extend, FromIterator};
use std::rc::Rc;
use std::str::FromStr;
use std::{
cell::RefCell,
path::{Path, PathBuf},
};
use slog::Level;
use slog_scope::{debug, error, info, warn};
use path_slash::PathBufExt;
use anyhow::{anyhow, Result};
use regex::Regex;
use lazy_static::lazy_static;
mod commands;
mod configuration;
mod consts;
mod dfs;
mod diagnostics_parser;
mod graph;
mod linemap;
mod lsp_ext;
mod merge_views;
mod navigation;
mod opengl;
mod source_mapper;
mod url_norm;
#[cfg(test)]
mod test;
#[tokio::main]
async fn main() {
let _guard = logging::set_level(logging::Level::Debug);
lazy_static! {
    // Matches an `#include "<path>"` directive; capture group 1 is the
    // quoted path. Leading whitespace and a trailing `\r` are tolerated.
    static ref RE_INCLUDE: Regex = Regex::new(r#"^(?:\s)*?(?:#include) "(.+)"\r?"#).unwrap();
    // The set of every shader file path OptiFine recognizes as a program
    // entry point, for each world folder and shader stage extension.
    // Capacity 6864 matches the number of generated entries below.
    static ref TOPLEVEL_FILES: HashSet<PathBuf> = {
        let mut set = HashSet::with_capacity(6864);
        for folder in ["shaders/", "shaders/world0/", "shaders/world1/", "shaders/world-1/"] {
            for ext in ["fsh", "vsh", "gsh", "csh"] {
                set.insert(format!("{}composite.{}", folder, ext).into());
                // Numbered program variants: composite1..composite99 etc.
                for i in 1..=99 {
                    set.insert(format!("{}composite{}.{}", folder, i, ext).into());
                    set.insert(format!("{}deferred{}.{}", folder, i, ext).into());
                    set.insert(format!("{}prepare{}.{}", folder, i, ext).into());
                    set.insert(format!("{}shadowcomp{}.{}", folder, i, ext).into());
                }
                set.insert(format!("{}composite_pre.{}", folder, ext).into());
                set.insert(format!("{}deferred.{}", folder, ext).into());
                set.insert(format!("{}deferred_pre.{}", folder, ext).into());
                set.insert(format!("{}final.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_armor_glint.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_basic.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_beaconbeam.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_block.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_clouds.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_damagedblock.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_entities.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_entities_glowing.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_hand.{}", folder, ext).into());
                // NOTE(review): duplicate of the line above — harmless for a
                // HashSet but probably meant to be a different program name.
                set.insert(format!("{}gbuffers_hand.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_hand_water.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_item.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_line.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_skybasic.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_skytextured.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_spidereyes.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_terrain.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_terrain_cutout.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_terrain_cutout_mip.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_terrain_solid.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_textured.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_textured_lit.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_water.{}", folder, ext).into());
                set.insert(format!("{}gbuffers_weather.{}", folder, ext).into());
                set.insert(format!("{}prepare.{}", folder, ext).into());
                set.insert(format!("{}shadow.{}", folder, ext).into());
                set.insert(format!("{}shadow_cutout.{}", folder, ext).into());
                set.insert(format!("{}shadow_solid.{}", folder, ext).into());
                set.insert(format!("{}shadowcomp.{}", folder, ext).into());
            }
        }
        set
    };
}
/// Entry point: wires up logging, the include graph, the tree-sitter GLSL
/// parser and the custom commands, then runs the LSP server over stdio.
fn main() {
    let guard = logging::set_logger_with_level(Level::Info);
    let endpoint_output = LSPEndpoint::create_lsp_output_with_output_stream(stdout);
    let cache_graph = graph::CachedStableGraph::new();
    let mut parser = Parser::new();
    parser.set_language(tree_sitter_glsl::language()).unwrap();
    let mut langserver = MinecraftShaderLanguageServer {
        endpoint: endpoint_output.clone(),
        graph: Rc::new(RefCell::new(cache_graph)),
        // Real root is set later from the client's InitializeParams.
        root: "".into(),
        // Filled in just below; needs `langserver.graph` to exist first.
        command_provider: None,
        opengl_context: Rc::new(opengl::OpenGlContext::new()),
        tree_sitter: Rc::new(RefCell::new(parser)),
        log_guard: Some(guard),
    };
    // Custom workspace/executeCommand handlers exposed to the client.
    langserver.command_provider = Some(commands::CustomCommandProvider::new(vec![
        (
            "graphDot",
            Box::new(commands::graph_dot::GraphDotCommand {
                graph: langserver.graph.clone(),
            }),
        ),
        (
            "virtualMerge",
            Box::new(commands::merged_includes::VirtualMergedDocument {
                graph: langserver.graph.clone(),
            }),
        ),
        (
            "parseTree",
            Box::new(commands::parse_tree::TreeSitterSExpr {
                tree_sitter: langserver.tree_sitter.clone(),
            }),
        ),
    ]));
    // Blocks, pumping LSP messages from stdin until shutdown.
    LSPEndpoint::run_server_from_input(&mut stdin().lock(), endpoint_output, langserver);
}
/// State for the Minecraft GLSL shader language server.
pub struct MinecraftShaderLanguageServer {
    // Outbound JSON-RPC endpoint (notifications, shutdown).
    endpoint: Endpoint,
    // Include graph of all shader files under `root`; shared with commands.
    graph: Rc<RefCell<graph::CachedStableGraph>>,
    // Workspace root, set during `initialize`.
    root: PathBuf,
    // Handlers for custom workspace/executeCommand requests; set in main().
    command_provider: Option<commands::CustomCommandProvider>,
    // Validates merged shader sources via OpenGL compilation.
    opengl_context: Rc<dyn opengl::ShaderValidator>,
    // tree-sitter parser configured for GLSL; used for navigation features.
    tree_sitter: Rc<RefCell<Parser>>,
    // Held so the global logger lives as long as the server; replaced on
    // log-level configuration changes (Drop tears down the old logger).
    log_guard: Option<slog_scope::GlobalLoggerGuard>,
}
/// Location of an `#include` directive's path string within its parent file.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct IncludePosition {
    // the 0-indexed line on which the include lives.
    line: usize,
    // the 0-indexed char offset defining the start of the include path string.
    start: usize,
    // the 0-indexed char offset defining the end of the include path string.
    end: usize,
}
impl Debug for IncludePosition {
    // Only the line is printed; start/end offsets are noise in logs.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{{line: {}}}", self.line)
    }
}
impl Display for IncludePosition {
    // Same rendering as Debug: only the line number is shown.
    // Return type spelled as `std::fmt::Result` (same type as before) for
    // consistency with the sibling Debug impl.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{{line: {}}}", self.line)
    }
}
/// Shader stage of a top-level file, derived from its extension
/// (fsh / vsh / gsh / csh).
#[derive(Debug)]
pub enum TreeType {
    Fragment,
    Vertex,
    Geometry,
    Compute,
}
impl MinecraftShaderLanguageServer {
/// Builds the canonical "not implemented" JSON-RPC error returned by
/// every LSP endpoint this server does not support.
pub fn error_not_available<DATA>(data: DATA) -> MethodError<DATA> {
    MethodError::<DATA> {
        code: 1,
        message: "Functionality not implemented.".to_string(),
        data,
    }
}
/// Walks the workspace root and seeds the include graph with every shader
/// file found, along with edges for their `#include` directives.
fn build_initial_graph(&self) {
    info!("generating graph for current root"; "root" => self.root.to_str().unwrap());
    // filter directories and files not ending in any of the 3 extensions
    WalkDir::new(&self.root)
        .into_iter()
        .filter_map(|entry| {
            // Unreadable directory entries are silently skipped.
            if entry.is_err() {
                return None;
            }
            let entry = entry.unwrap();
            let path = entry.path();
            if path.is_dir() {
                return None;
            }
            let ext = match path.extension() {
                Some(e) => e,
                None => return None,
            };
            // TODO: include user added extensions with a set
            if ext != "vsh" && ext != "fsh" && ext != "csh" && ext != "gsh" && ext != "glsl" && ext != "inc" {
                return None;
            }
            Some(entry.into_path())
        })
        .for_each(|path| {
            // iterate all valid found files, search for includes, add a node into the graph for each
            // file and add a file->includes KV into the map
            self.add_file_and_includes_to_graph(&path);
        });
    info!("finished building project include graph");
}
/// Adds `path` as a graph node and one edge per `#include` found in it.
fn add_file_and_includes_to_graph(&self, path: &Path) {
    let includes = self.find_includes(path);
    let idx = self.graph.borrow_mut().add_node(path);
    debug!("adding includes for new file"; "file" => path.to_str().unwrap(), "includes" => format!("{:?}", includes));
    for include in includes {
        self.add_include(include, idx);
    }
}
/// Registers (or fetches) the included file as a node and links
/// `node -> child` with the include's position as edge metadata.
fn add_include(&self, include: (PathBuf, IncludePosition), node: NodeIndex) {
    let (child_path, position) = include;
    let child = self.graph.borrow_mut().add_node(&child_path);
    self.graph.borrow_mut().add_edge(node, child, position);
}
/// Scans `file` line-by-line for `#include` directives and returns each
/// resolved absolute path together with the directive's position.
/// Paths starting with `/` are rooted at `<root>/shaders`; others are
/// relative to the including file's directory.
pub fn find_includes(&self, file: &Path) -> Vec<(PathBuf, IncludePosition)> {
    let mut includes = Vec::default();
    let buf = BufReader::new(std::fs::File::open(file).unwrap());
    buf.lines()
        .enumerate()
        .filter_map(|line| match line.1 {
            Ok(t) => Some((line.0, t)),
            // Unreadable (e.g. non-UTF-8) lines are skipped.
            Err(_e) => None,
        })
        .filter(|line| RE_INCLUDE.is_match(line.1.as_str()))
        .for_each(|line| {
            // Capture group 1 is the quoted include path.
            let cap = RE_INCLUDE.captures(line.1.as_str()).unwrap().get(1).unwrap();
            let start = cap.start();
            let end = cap.end();
            let mut path: String = cap.as_str().into();
            let full_include = if path.starts_with('/') {
                path = path.strip_prefix('/').unwrap().to_string();
                self.root.join("shaders").join(PathBuf::from_slash(&path))
            } else {
                file.parent().unwrap().join(PathBuf::from_slash(&path))
            };
            includes.push((full_include, IncludePosition { line: line.0, start, end }));
        });
    includes
}
/// Re-scans `file` for includes and diffs them against the graph's current
/// edges for that node, adding new edges and removing stale ones.
fn update_includes(&self, file: &Path) {
    let includes = self.find_includes(file);
    info!("includes found for file"; "file" => file.to_str().unwrap(), "includes" => format!("{:?}", includes));
    // Files not yet in the graph are ignored (nothing to diff against).
    let idx = match self.graph.borrow_mut().find_node(file) {
        None => return,
        Some(n) => n,
    };
    let prev_children: HashSet<_> = HashSet::from_iter(self.graph.borrow().get_all_child_positions(idx).map(|tup| {
        (self.graph.borrow().get_node(tup.0), tup.1)
    }));
    let new_children: HashSet<_> = includes.iter().cloned().collect();
    // Set difference both ways gives the edges to add and to drop.
    let to_be_added = new_children.difference(&prev_children);
    let to_be_removed = prev_children.difference(&new_children);
    debug!(
        "include sets diff'd";
        "for removal" => format!("{:?}", to_be_removed),
        "for addition" => format!("{:?}", to_be_added)
    );
    for removal in to_be_removed {
        let child = self.graph.borrow_mut().find_node(&removal.0).unwrap();
        self.graph.borrow_mut().remove_edge(idx, child, removal.1);
    }
    for insertion in to_be_added {
        self.add_include(includes.iter().find(|f| f.0 == *insertion.0).unwrap().clone(), idx);
    }
}
/// Lints the file at `uri` by merging each top-level shader tree it belongs
/// to into a single GLSL source, compiling it, and mapping the compiler's
/// output back to per-file diagnostics. Every file that contributed source
/// gets an entry in the returned map (possibly empty) so stale diagnostics
/// are cleared client-side.
pub fn lint(&self, uri: &Path) -> Result<HashMap<Url, Vec<Diagnostic>>> {
    // get all top level ancestors of this file
    let file_ancestors = match self.get_file_toplevel_ancestors(uri) {
        Ok(opt) => match opt {
            Some(ancestors) => ancestors,
            None => vec![],
        },
        Err(e) => return Err(e),
    };
    info!(
        "top-level file ancestors found";
        "uri" => uri.to_str().unwrap(),
        "ancestors" => format!("{:?}", file_ancestors
            .iter()
            .map(|e| PathBuf::from_str(
                &self.graph.borrow().graph[*e].clone()
            )
            .unwrap())
            .collect::<Vec<PathBuf>>())
    );
    // the set of all filepath->content.
    let mut all_sources: HashMap<PathBuf, String> = HashMap::new();
    // the set of filepath->list of diagnostics to report
    let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
    // we want to backfill the diagnostics map with all linked sources
    let back_fill = |all_sources: &HashMap<PathBuf, String>, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
        for path in all_sources.keys() {
            diagnostics.entry(Url::from_file_path(path).unwrap()).or_default();
        }
    };
    // if we are a top-level file (this has to be one of the set defined by Optifine, right?)
    if file_ancestors.is_empty() {
        // gather the list of all descendants
        let root = self.graph.borrow_mut().find_node(uri).unwrap();
        let tree = match self.get_dfs_for_node(root) {
            Ok(tree) => tree,
            Err(e) => {
                // Include cycle detected: report it as the only diagnostic.
                diagnostics.insert(Url::from_file_path(uri).unwrap(), vec![e.into()]);
                return Ok(diagnostics);
            }
        };
        all_sources.extend(self.load_sources(&tree)?);
        let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len());
        // Merge the whole include tree into one compilable source string.
        let view = {
            let graph = self.graph.borrow();
            let merged_string = {
                merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build()
            };
            merged_string
        };
        let root_path = self.graph.borrow().get_node(root);
        let ext = match root_path.extension() {
            Some(ext) => ext.to_str().unwrap(),
            None => {
                back_fill(&all_sources, &mut diagnostics);
                return Ok(diagnostics);
            }
        };
        if !TOPLEVEL_FILES.contains(root_path.strip_prefix(&self.root).unwrap()) {
            warn!("got a non-valid toplevel file"; "root_ancestor" => root_path.to_str().unwrap(), "stripped" => root_path.strip_prefix(&self.root).unwrap().to_str().unwrap());
            back_fill(&all_sources, &mut diagnostics);
            return Ok(diagnostics);
        }
        // Extension determines the shader stage handed to the compiler.
        let tree_type = if ext == "fsh" {
            TreeType::Fragment
        } else if ext == "vsh" {
            TreeType::Vertex
        } else if ext == "gsh" {
            TreeType::Geometry
        } else if ext == "csh" {
            TreeType::Compute
        } else {
            unreachable!();
        };
        // None means the compile produced no errors — nothing to parse.
        let stdout = match self.compile_shader_source(&view, tree_type, &root_path) {
            Some(s) => s,
            None => {
                back_fill(&all_sources, &mut diagnostics);
                return Ok(diagnostics);
            }
        };
        let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref());
        diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow()));
    } else {
        // Non-top-level file: lint every top-level tree that includes it.
        let mut all_trees: Vec<(TreeType, Vec<FilialTuple>)> = Vec::new();
        for root in &file_ancestors {
            let nodes = match self.get_dfs_for_node(*root) {
                Ok(nodes) => nodes,
                Err(e) => {
                    diagnostics.insert(Url::from_file_path(uri).unwrap(), vec![e.into()]);
                    back_fill(&all_sources, &mut diagnostics); // TODO: confirm
                    return Ok(diagnostics);
                }
            };
            let root_path = self.graph.borrow().get_node(*root).clone();
            let ext = match root_path.extension() {
                Some(ext) => ext.to_str().unwrap(),
                None => continue,
            };
            if !TOPLEVEL_FILES.contains(root_path.strip_prefix(&self.root).unwrap()) {
                warn!("got a non-valid toplevel file"; "root_ancestor" => root_path.to_str().unwrap(), "stripped" => root_path.strip_prefix(&self.root).unwrap().to_str().unwrap());
                continue;
            }
            let tree_type = if ext == "fsh" {
                TreeType::Fragment
            } else if ext == "vsh" {
                TreeType::Vertex
            } else if ext == "gsh" {
                TreeType::Geometry
            } else if ext == "csh" {
                TreeType::Compute
            } else {
                unreachable!();
            };
            let sources = self.load_sources(&nodes)?;
            all_trees.push((tree_type, nodes));
            all_sources.extend(sources);
        }
        for tree in all_trees {
            // bit over-zealous in allocation but better than having to resize
            let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len());
            let view = {
                let graph = self.graph.borrow();
                let merged_string = {
                    merge_views::MergeViewBuilder::new(&tree.1, &all_sources, &graph, &mut source_mapper).build()
                };
                merged_string
            };
            // First tuple of the DFS output is the tree's root file.
            let root_path = self.graph.borrow().get_node(tree.1.first().unwrap().child);
            let stdout = match self.compile_shader_source(&view, tree.0, &root_path) {
                Some(s) => s,
                None => continue,
            };
            let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref());
            diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow()));
        }
    };
    back_fill(&all_sources, &mut diagnostics);
    Ok(diagnostics)
}
/// Compiles `source` as the given shader stage; returns the compiler's
/// error output, or `None` when compilation reported no errors.
fn compile_shader_source(&self, source: &str, tree_type: TreeType, path: &Path) -> Option<String> {
    let result = self.opengl_context.clone().validate(tree_type, source);
    if let Some(output) = &result {
        info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n")), "tree_root" => path.to_str().unwrap());
    } else {
        info!("compilation reported no errors"; "tree_root" => path.to_str().unwrap());
    }
    result
}
/// Depth-first traversal from `root`, yielding (child, parent) tuples for
/// the whole include tree; fails if the traversal detects an include cycle.
pub fn get_dfs_for_node(&self, root: NodeIndex) -> Result<Vec<FilialTuple>, dfs::error::CycleError> {
    let graph_ref = self.graph.borrow();
    let dfs = dfs::Dfs::new(&graph_ref, root);
    dfs.collect::<Result<_, _>>()
}
/// Reads the source of every file referenced by `nodes` into a
/// path -> content map, normalizing CRLF line endings to LF.
/// Duplicate nodes (files included more than once) are read only once.
///
/// Returns an error if any file cannot be read.
pub fn load_sources(&self, nodes: &[FilialTuple]) -> Result<HashMap<PathBuf, String>> {
    let mut sources = HashMap::new();
    // Borrow the graph once; the loop only reads it (was re-borrowed on
    // every iteration before).
    let graph = self.graph.borrow();
    for node in nodes {
        let path = graph.get_node(node.child);
        if sources.contains_key(&path) {
            continue;
        }
        let source = match fs::read_to_string(&path) {
            Ok(s) => s,
            Err(e) => return Err(anyhow!("error reading {:?}: {}", path, e)),
        };
        let source = source.replace("\r\n", "\n");
        sources.insert(path.clone(), source);
    }
    Ok(sources)
}
/// Returns the top-level (in-degree-0) ancestors of `uri` in the include
/// graph: `Ok(None)` when `uri` is itself a root, `Err` when `uri` is not
/// in the graph at all.
fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
    let node = self
        .graph
        .borrow_mut()
        .find_node(uri)
        .ok_or_else(|| anyhow!("node not found {:?}", uri))?;
    let roots = self.graph.borrow().collect_root_ancestors(node);
    Ok(if roots.is_empty() { None } else { Some(roots) })
}
/// Sends one textDocument/publishDiagnostics notification per file.
/// Files mapped to an empty Vec clear their previous diagnostics.
pub fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
    // info!("DIAGNOSTICS:\n{:?}", diagnostics);
    for (uri, diagnostics) in diagnostics {
        self.endpoint
            .send_notification(
                PublishDiagnostics::METHOD,
                PublishDiagnosticsParams {
                    uri,
                    diagnostics,
                    version: document_version,
                },
            )
            .expect("failed to publish diagnostics");
    }
}
/// Notifies the client of a status change via the custom `mc-glsl/status`
/// notification; send failures are deliberately ignored (best-effort UI).
fn set_status(&self, status: impl Into<String>, message: impl Into<String>, icon: impl Into<String>) {
    self.endpoint
        .send_notification(
            lsp_ext::Status::METHOD,
            lsp_ext::StatusParams {
                status: status.into(),
                message: Some(message.into()),
                icon: Some(icon.into()),
            },
        )
        .unwrap_or(());
}
}
impl LanguageServerHandling for MinecraftShaderLanguageServer {
/// LSP `initialize` handler: advertises capabilities, records the workspace
/// root, then builds the initial include graph (status reported via the
/// custom status notification). Rejects clients with no workspace root.
fn initialize(&mut self, params: InitializeParams, completable: MethodCompletable<InitializeResult, InitializeError>) {
    logging::slog_with_trace_id(|| {
        info!("starting server...");
        let capabilities = ServerCapabilities {
            definition_provider: Some(OneOf::Left(true)),
            references_provider: Some(OneOf::Left(true)),
            document_symbol_provider: Some(OneOf::Left(true)),
            document_link_provider: Some(DocumentLinkOptions {
                resolve_provider: None,
                work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
            }),
            execute_command_provider: Some(ExecuteCommandOptions {
                commands: vec!["graphDot".into()],
                work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
            }),
            // Full document sync; linting runs on open and save.
            text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
                open_close: Some(true),
                will_save: None,
                will_save_wait_until: None,
                change: Some(TextDocumentSyncKind::FULL),
                save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })),
            })),
            ..ServerCapabilities::default()
        };
        let root = match params.root_uri {
            Some(uri) => PathBuf::from_url(uri),
            None => {
                completable.complete(Err(MethodError {
                    code: 42069,
                    message: "Must be in workspace".into(),
                    data: InitializeError { retry: false },
                }));
                return;
            }
        };
        // Respond before the potentially slow graph build so the client
        // isn't blocked waiting on initialize.
        completable.complete(Ok(InitializeResult {
            capabilities,
            server_info: None,
        }));
        self.set_status("loading", "Building dependency graph...", "$(loading~spin)");
        self.root = root;
        self.build_initial_graph();
        self.set_status("ready", "Project initialized", "$(check)");
    });
}
// LSP `shutdown`: acknowledge; actual teardown happens on `exit`.
fn shutdown(&mut self, _: (), completable: LSCompletable<()>) {
    warn!("shutting down language server...");
    completable.complete(Ok(()));
}

// LSP `exit`: stop the JSON-RPC endpoint, ending the main loop.
fn exit(&mut self, _: ()) {
    self.endpoint.request_shutdown();
}
/// Applies client configuration changes; currently only the log level
/// (settings key `mcglsl.logLevel`) is honored.
fn workspace_change_configuration(&mut self, params: DidChangeConfigurationParams) {
    logging::slog_with_trace_id(|| {
        #[derive(Deserialize)]
        struct Configuration {
            #[serde(alias = "logLevel")]
            log_level: String,
        }
        let config: Configuration = from_value(params.settings.as_object().unwrap().get("mcglsl").unwrap().to_owned()).unwrap();
        info!("got updated configuration"; "config" => params.settings.as_object().unwrap().get("mcglsl").unwrap().to_string());
        configuration::handle_log_level_change(config.log_level, |level| {
            self.log_guard = None; // set to None so Drop is invoked
            self.log_guard = Some(logging::set_logger_with_level(level));
        })
    });
}
/// On document open: lazily add files created after the initial graph
/// build, then lint and publish diagnostics. Files outside the workspace
/// root are ignored.
fn did_open_text_document(&mut self, params: DidOpenTextDocumentParams) {
    logging::slog_with_trace_id(|| {
        let path = PathBuf::from_url(params.text_document.uri);
        if !path.starts_with(&self.root) {
            return;
        }
        // `.is_none()` instead of `== None` (idiomatic, no PartialEq needed).
        if self.graph.borrow_mut().find_node(&path).is_none() {
            self.add_file_and_includes_to_graph(&path);
        }
        match self.lint(&path) {
            Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
            Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()),
        }
    });
}
// Change events are ignored; linting runs on open and save instead.
fn did_change_text_document(&mut self, _: DidChangeTextDocumentParams) {}

fn did_close_text_document(&mut self, _: DidCloseTextDocumentParams) {}

/// On save: refresh this file's include edges, then lint and publish.
fn did_save_text_document(&mut self, params: DidSaveTextDocumentParams) {
    logging::slog_with_trace_id(|| {
        let path = PathBuf::from_url(params.text_document.uri);
        // Ignore files outside the workspace root.
        if !path.starts_with(&self.root) {
            return;
        }
        self.update_includes(&path);
        match self.lint(&path) {
            Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
            Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()),
        }
    });
}

fn did_change_watched_files(&mut self, _: DidChangeWatchedFilesParams) {}

// Completion is not supported.
fn completion(&mut self, _: TextDocumentPositionParams, completable: LSCompletable<CompletionList>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn resolve_completion_item(&mut self, _: CompletionItem, completable: LSCompletable<CompletionItem>) {
    completable.complete(Err(Self::error_not_available(())));
}

// Hover intentionally completes nothing (placeholder implementation kept
// commented out below).
fn hover(&mut self, _: TextDocumentPositionParams, _: LSCompletable<Hover>) {
    /* completable.complete(Ok(Hover{
    contents: HoverContents::Markup(MarkupContent{
    kind: MarkupKind::Markdown,
    value: String::from("# Hello World"),
    }),
    range: None,
    })); */
}
/// Dispatches workspace/executeCommand to the CustomCommandProvider and
/// mirrors the outcome to the user via a window/showMessage popup.
fn execute_command(&mut self, params: ExecuteCommandParams, completable: LSCompletable<Option<Value>>) {
    logging::slog_with_trace_id(|| {
        match self
            .command_provider
            .as_ref()
            // Always set in main() before the server starts.
            .unwrap()
            .execute(&params.command, &params.arguments, &self.root)
        {
            Ok(resp) => {
                info!("executed command successfully"; "command" => params.command.clone());
                self.endpoint
                    .send_notification(
                        ShowMessage::METHOD,
                        ShowMessageParams {
                            typ: MessageType::INFO,
                            message: format!("Command {} executed successfully.", params.command),
                        },
                    )
                    .expect("failed to send popup/show message notification");
                completable.complete(Ok(Some(resp)))
            }
            Err(err) => {
                error!("failed to execute command"; "command" => params.command.clone(), "error" => format!("{:?}", err));
                self.endpoint
                    .send_notification(
                        ShowMessage::METHOD,
                        ShowMessageParams {
                            typ: MessageType::ERROR,
                            message: format!("Failed to execute `{}`. Reason: {}", params.command, err),
                        },
                    )
                    .expect("failed to send popup/show message notification");
                completable.complete(Err(MethodError::new(32420, err.to_string(), ())))
            }
        }
    });
}
// Signature help is not supported.
fn signature_help(&mut self, _: TextDocumentPositionParams, completable: LSCompletable<SignatureHelp>) {
    completable.complete(Err(Self::error_not_available(())));
}

/// textDocument/definition via tree-sitter: parses the file and resolves
/// definitions at the requested position. Out-of-workspace files are
/// silently ignored (request left uncompleted).
fn goto_definition(&mut self, params: TextDocumentPositionParams, completable: LSCompletable<Vec<Location>>) {
    logging::slog_with_trace_id(|| {
        let path = PathBuf::from_url(params.text_document.uri);
        if !path.starts_with(&self.root) {
            return;
        }
        let parser = &mut self.tree_sitter.borrow_mut();
        let parser_ctx = match navigation::ParserContext::new(parser, &path) {
            Ok(ctx) => ctx,
            Err(e) => {
                return completable.complete(Err(MethodError {
                    code: 42069,
                    message: format!("error building parser context: error={}, path={:?}", e, path),
                    data: (),
                }))
            }
        };
        match parser_ctx.find_definitions(&path, params.position) {
            Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())),
            Err(e) => completable.complete(Err(MethodError {
                code: 42069,
                message: format!("error finding definitions: error={}, path={:?}", e, path),
                data: (),
            })),
        }
    });
}
/// textDocument/references via tree-sitter. Out-of-workspace files are
/// silently ignored (request left uncompleted).
fn references(&mut self, params: ReferenceParams, completable: LSCompletable<Vec<Location>>) {
    logging::slog_with_trace_id(|| {
        let path = PathBuf::from_url(params.text_document_position.text_document.uri);
        if !path.starts_with(&self.root) {
            return;
        }
        let parser = &mut self.tree_sitter.borrow_mut();
        let parser_ctx = match navigation::ParserContext::new(parser, &path) {
            Ok(ctx) => ctx,
            Err(e) => {
                return completable.complete(Err(MethodError {
                    code: 42069,
                    message: format!("error building parser context: error={}, path={:?}", e, path),
                    data: (),
                }))
            }
        };
        match parser_ctx.find_references(&path, params.text_document_position.position) {
            Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())),
            Err(e) => completable.complete(Err(MethodError {
                code: 42069,
                // Fixed copy-pasted message: this is the references handler,
                // not goto-definition.
                message: format!("error finding references: error={}, path={:?}", e, path),
                data: (),
            })),
        }
    });
}
// Document highlight is not supported.
fn document_highlight(&mut self, _: TextDocumentPositionParams, completable: LSCompletable<Vec<DocumentHighlight>>) {
    completable.complete(Err(Self::error_not_available(())));
}
/// textDocument/documentSymbol via tree-sitter. Out-of-workspace files are
/// silently ignored (request left uncompleted).
fn document_symbols(&mut self, params: DocumentSymbolParams, completable: LSCompletable<DocumentSymbolResponse>) {
    logging::slog_with_trace_id(|| {
        let path = PathBuf::from_url(params.text_document.uri);
        if !path.starts_with(&self.root) {
            return;
        }
        let parser = &mut self.tree_sitter.borrow_mut();
        let parser_ctx = match navigation::ParserContext::new(parser, &path) {
            Ok(ctx) => ctx,
            Err(e) => {
                return completable.complete(Err(MethodError {
                    code: 42069,
                    message: format!("error building parser context: error={}, path={:?}", e, path),
                    data: (),
                }))
            }
        };
        match parser_ctx.list_symbols(&path) {
            Ok(symbols) => completable.complete(Ok(DocumentSymbolResponse::from(symbols.unwrap_or_default()))),
            Err(e) => {
                return completable.complete(Err(MethodError {
                    code: 42069,
                    // Fixed copy-pasted message: this handler lists symbols,
                    // it does not find definitions.
                    message: format!("error listing symbols: error={}, path={:?}", e, path),
                    data: (),
                }))
            }
        }
    });
}
// The following capabilities are not supported by this server.
fn workspace_symbols(&mut self, _: WorkspaceSymbolParams, completable: LSCompletable<DocumentSymbolResponse>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn code_action(&mut self, _: CodeActionParams, completable: LSCompletable<Vec<Command>>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn code_lens(&mut self, _: CodeLensParams, completable: LSCompletable<Vec<CodeLens>>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn code_lens_resolve(&mut self, _: CodeLens, completable: LSCompletable<CodeLens>) {
    completable.complete(Err(Self::error_not_available(())));
}
/// textDocument/documentLink: turns every `#include` edge out of the
/// current document into a clickable link spanning the include path.
fn document_link(&mut self, params: DocumentLinkParams, completable: LSCompletable<Vec<DocumentLink>>) {
    logging::slog_with_trace_id(|| {
        // node for current document
        let curr_doc = PathBuf::from_url(params.text_document.uri);
        let node = match self.graph.borrow_mut().find_node(&curr_doc) {
            Some(n) => n,
            None => {
                // Unknown files produce no links rather than an error.
                warn!("document not found in graph"; "path" => curr_doc.to_str().unwrap());
                completable.complete(Ok(vec![]));
                return;
            }
        };
        let edges: Vec<DocumentLink> = self
            .graph
            .borrow()
            .child_node_indexes(node)
            .filter_map::<Vec<DocumentLink>, _>(|child| {
                // A child may be included multiple times; one link each.
                let graph = self.graph.borrow();
                graph.get_child_positions(node, child).map(|value| {
                    let path = graph.get_node(child);
                    let url = match Url::from_file_path(&path) {
                        Ok(url) => url,
                        Err(e) => {
                            error!("error converting into url"; "path" => path.to_str().unwrap(), "error" => format!("{:?}", e));
                            return None;
                        }
                    };
                    Some(DocumentLink {
                        // Link range covers exactly the quoted include path.
                        range: Range::new(
                            Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.start).unwrap()),
                            Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.end).unwrap()),
                        ),
                        target: Some(url.clone()),
                        tooltip: Some(url.path().to_string()),
                        data: None,
                    })
                }).collect()
            })
            .flatten()
            .collect();
        debug!("document link results";
            "links" => format!("{:?}", edges.iter().map(|e| (e.range, e.target.as_ref().unwrap().path())).collect::<Vec<_>>()),
            "path" => curr_doc.to_str().unwrap(),
        );
        completable.complete(Ok(edges));
    });
}
// The following capabilities are not supported by this server.
fn document_link_resolve(&mut self, _: DocumentLink, completable: LSCompletable<DocumentLink>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn formatting(&mut self, _: DocumentFormattingParams, completable: LSCompletable<Vec<TextEdit>>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn range_formatting(&mut self, _: DocumentRangeFormattingParams, completable: LSCompletable<Vec<TextEdit>>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn on_type_formatting(&mut self, _: DocumentOnTypeFormattingParams, completable: LSCompletable<Vec<TextEdit>>) {
    completable.complete(Err(Self::error_not_available(())));
}

fn rename(&mut self, _: RenameParams, completable: LSCompletable<WorkspaceEdit>) {
    completable.complete(Err(Self::error_not_available(())));
}
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let (service, socket) = LspService::new(|client| Server::new(client, opengl::ContextFacade::default));
tower_lsp::Server::new(stdin, stdout, socket)
.serve(service)
.with_logger(logger())
.await;
}

View file

@ -1,645 +0,0 @@
use std::cmp::min;
use std::iter::Peekable;
use std::{
collections::{HashMap, LinkedList, VecDeque},
path::{Path, PathBuf},
};
use core::slice::Iter;
use petgraph::stable_graph::NodeIndex;
use slog_scope::debug;
use crate::graph::CachedStableGraph;
use crate::source_mapper::SourceMapper;
use crate::IncludePosition;
/// FilialTuple represents a tuple (not really) of a child and any legitimate
/// parent. Parent can be nullable in the case of the child being a top level
/// node in the tree.
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)]
pub struct FilialTuple {
    // The included (child) file's graph node.
    pub child: NodeIndex,
    // The including (parent) node; `None` for the tree's root file.
    pub parent: Option<NodeIndex>,
}
/// Merges the source strings according to the nodes comprising a tree of imports into a GLSL source string
/// that can be handed off to the GLSL compiler.
pub struct MergeViewBuilder<'a> {
    // DFS-ordered (child, parent) tuples of the include tree.
    nodes: &'a [FilialTuple],
    // Peekable cursor over `nodes`, consumed during build().
    nodes_peeker: Peekable<Iter<'a, FilialTuple>>,
    // Preloaded file contents, keyed by path.
    sources: &'a HashMap<PathBuf, String>,
    // Include graph, used to resolve node indices to paths and positions.
    graph: &'a CachedStableGraph,
    // Assigns stable numeric ids to files for #line directive mapping.
    source_mapper: &'a mut SourceMapper,
    // holds the offset into the child which has been added to the merge list for a parent.
    // A child can have multiple parents for a given tree, and be included multiple times
    // by the same parent, hence we have to track it for a ((child, parent), line) tuple
    // instead of just the child or (child, parent).
    last_offset_set: HashMap<FilialTuple, usize>,
    // holds, for any given filial tuple, the iterator yielding all the positions at which the child
    // is included into the parent in line-sorted order. This is necessary for files that are imported
    // more than once into the same parent, so we can easily get the next include position.
    parent_child_edge_iterator: HashMap<FilialTuple, Box<(dyn Iterator<Item = IncludePosition> + 'a)>>,
}
impl<'a> MergeViewBuilder<'a> {
    /// Creates a builder over `nodes` (a depth-first ordering of the include tree),
    /// the loaded `sources`, the include `graph`, and the `source_mapper` that
    /// assigns the numeric file ids used in generated `#line` directives.
    pub fn new(
        nodes: &'a [FilialTuple], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph, source_mapper: &'a mut SourceMapper,
    ) -> Self {
        MergeViewBuilder {
            nodes,
            nodes_peeker: nodes.iter().peekable(),
            sources,
            graph,
            source_mapper,
            last_offset_set: HashMap::new(),
            parent_child_edge_iterator: HashMap::new(),
        }
    }

    /// Walks the include tree and returns the single merged GLSL source string,
    /// with `#line` directives stitched in at every include boundary.
    pub fn build(&mut self) -> String {
        // contains additionally inserted lines such as #line and other directives, preamble defines etc
        let mut extra_lines: Vec<String> = Vec::new();
        extra_lines.reserve((self.nodes.len() * 2) + 2);

        // list of source code views onto the below sources
        let mut merge_list: LinkedList<&'a str> = LinkedList::new();

        // invariant: nodes_iter always has _at least_ one element. Can't save a not-file :B
        let first = self.nodes_peeker.next().unwrap().child;
        let first_path = self.graph.get_node(first);
        let first_source = self.sources.get(&first_path).unwrap();

        // seed source_mapper with top-level file
        self.source_mapper.get_num(first);

        let version_line_offset = self.find_version_offset(first_source);
        let _version_char_offsets = self.char_offset_for_line(version_line_offset, first_source);
        // add_preamble(
        //     version_line_offset,
        //     version_char_offsets.1,
        //     &first_path,
        //     first,
        //     first_source,
        //     &mut merge_list,
        //     &mut extra_lines,
        //     source_mapper,
        // );

        // last_offset_set.insert((first, None), version_char_offsets.1);
        self.set_last_offset_for_tuple(None, first, 0);

        // stack to keep track of the depth first traversal
        let mut stack = VecDeque::<NodeIndex>::new();
        self.create_merge_views(&mut merge_list, &mut extra_lines, &mut stack);

        // now we add a view of the remainder of the root file
        let offset = self.get_last_offset_for_tuple(None, first).unwrap();
        let len = first_source.len();
        merge_list.push_back(&first_source[min(offset, len)..]);

        // pre-size the output so the final concatenation never reallocates
        let total_len = merge_list.iter().fold(0, |a, b| a + b.len());

        let mut merged = String::with_capacity(total_len);
        merged.extend(merge_list);
        merged
    }

    /// Recursive worker: consumes (child, parent) pairs from `nodes_peeker`,
    /// pushing borrowed slices of parent/child sources plus generated `#line`
    /// directives onto `merge_list`. `stack` holds the ancestors of the current
    /// parent and is used to decide when to return back up the call chain.
    fn create_merge_views(&mut self, merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec<String>, stack: &mut VecDeque<NodeIndex>) {
        loop {
            let n = match self.nodes_peeker.next() {
                Some(n) => n,
                None => return,
            };

            // invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
            let (parent, child) = (n.parent.unwrap(), n.child);
            // gets the next include position for the filial tuple, seeding if this is the first time querying this tuple
            let edge = self
                .parent_child_edge_iterator
                .entry(*n)
                .or_insert_with(|| {
                    let child_positions = self.graph.get_child_positions(parent, child);
                    Box::new(child_positions)
                })
                .next()
                .unwrap();
            let parent_path = self.graph.get_node(parent).clone();
            let child_path = self.graph.get_node(child).clone();

            let parent_source = self.sources.get(&parent_path).unwrap();
            let (char_for_line, char_following_line) = self.char_offset_for_line(edge.line, parent_source);

            // record where we got up to in the parent; the previous value (or 0) is
            // where the next slice of the parent's source starts
            let offset = *self
                .set_last_offset_for_tuple(stack.back().copied(), parent, char_following_line)
                .get_or_insert(0);

            debug!("creating view to start child file";
                "parent" => parent_path.to_str().unwrap(), "child" => child_path.to_str().unwrap(),
                "grandparent" => stack.back().copied().map(|g| self.graph.get_node(g).to_str().unwrap().to_string()), // self.graph.get_node().to_str().unwrap(),
                "last_parent_offset" => offset, "line" => edge.line, "char_for_line" => char_for_line,
                "char_following_line" => char_following_line,
            );

            // parent source up to (but excluding) the include line, then the opening #line
            merge_list.push_back(&parent_source[offset..char_for_line]);
            self.add_opening_line_directive(&child_path, child, merge_list, extra_lines);

            match self.nodes_peeker.peek() {
                Some(next) => {
                    let next = *next;
                    // if the next pair's parent is not a child of the current pair, we dump the rest of this childs source
                    if next.parent.unwrap() != child {
                        let child_source = self.sources.get(&child_path).unwrap();
                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                        let offset = {
                            match child_source.ends_with('\n') {
                                true => child_source.len() - 1,
                                false => child_source.len(),
                            }
                        };
                        merge_list.push_back(&child_source[..offset]);
                        self.set_last_offset_for_tuple(Some(parent), child, 0);
                        // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
                        self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);
                        // if the next pair's parent is not the current pair's parent, we need to bubble up
                        if stack.contains(&next.parent.unwrap()) {
                            return;
                        }
                        continue;
                    }

                    // the next pair descends into this child: recurse with this
                    // parent pushed as an ancestor
                    stack.push_back(parent);
                    self.create_merge_views(merge_list, extra_lines, stack);
                    stack.pop_back();

                    let offset = self.get_last_offset_for_tuple(Some(parent), child).unwrap();
                    let child_source = self.sources.get(&child_path).unwrap();
                    // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
                    let end_offset = match child_source.ends_with('\n') {
                        true => 1,
                        false => 0,
                    };
                    if offset < child_source.len() - end_offset {
                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                        merge_list.push_back(&child_source[offset..child_source.len() - end_offset]);
                        self.set_last_offset_for_tuple(Some(parent), child, 0);
                    }

                    // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
                    self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);

                    // we need to check the next item at the point of original return further down the callstack
                    if self.nodes_peeker.peek().is_some() && stack.contains(&self.nodes_peeker.peek().unwrap().parent.unwrap()) {
                        return;
                    }
                }
                None => {
                    // no more pairs: flush the rest of this child and close out the parent
                    let child_source = self.sources.get(&child_path).unwrap();
                    // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
                    let offset = match child_source.ends_with('\n') {
                        true => child_source.len() - 1,
                        false => child_source.len(),
                    };
                    merge_list.push_back(&child_source[..offset]);
                    self.set_last_offset_for_tuple(Some(parent), child, 0);
                    // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
                    self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);
                }
            }
        }
    }

    /// Records `offset` as the high-water mark for `(child, parent)`, returning
    /// the previously stored offset (if any).
    fn set_last_offset_for_tuple(&mut self, parent: Option<NodeIndex>, child: NodeIndex, offset: usize) -> Option<usize> {
        debug!("inserting last offset";
            "parent" => parent.map(|p| self.graph.get_node(p).to_str().unwrap().to_string()),
            "child" => self.graph.get_node(child).to_str().unwrap().to_string(),
            "offset" => offset);
        self.last_offset_set.insert(FilialTuple { child, parent }, offset)
    }

    /// Looks up the stored high-water mark for `(child, parent)`, if any.
    fn get_last_offset_for_tuple(&self, parent: Option<NodeIndex>, child: NodeIndex) -> Option<usize> {
        self.last_offset_set.get(&FilialTuple { child, parent }).copied()
    }

    // returns the character offset + 1 of the end of line number `line` and the character
    // offset + 1 for the end of the line after the previous one
    fn char_offset_for_line(&self, line_num: usize, source: &str) -> (usize, usize) {
        let mut char_for_line: usize = 0;
        let mut char_following_line: usize = 0;
        for (n, line) in source.lines().enumerate() {
            if n == line_num {
                // +1 accounts for the newline terminator `lines()` strips
                char_following_line += line.len() + 1;
                break;
            }
            char_for_line += line.len() + 1;
            char_following_line = char_for_line;
        }
        (char_for_line, char_following_line)
    }

    /// Returns the 0-based line number of the first `#version ` directive,
    /// or 0 when the source has none.
    fn find_version_offset(&self, source: &str) -> usize {
        source
            .lines()
            .enumerate()
            .find(|(_, line)| line.starts_with("#version "))
            .map_or(0, |(i, _)| i)
    }

    // fn add_preamble<'a>(
    //     version_line_offset: usize, version_char_offset: usize, path: &Path, node: NodeIndex, source: &'a str,
    //     merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec<String>, source_mapper: &mut SourceMapper,
    // ) {
    //     // TODO: Optifine #define preamble
    //     merge_list.push_back(&source[..version_char_offset]);
    //     let google_line_directive = format!(
    //         "#extension GL_GOOGLE_cpp_style_line_directive : enable\n#line {} {} // {}\n",
    //         // +2 because 0 indexed but #line is 1 indexed and references the *following* line
    //         version_line_offset + 2,
    //         source_mapper.get_num(node),
    //         path.to_str().unwrap().replace('\\', "\\\\"),
    //     );
    //     extra_lines.push(google_line_directive);
    //     unsafe_get_and_insert(merge_list, extra_lines);
    // }

    /// Pushes a `#line 1 <id> // <path>` directive marking the start of an
    /// included child file.
    fn add_opening_line_directive(
        &mut self, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
    ) {
        let line_directive = format!(
            "#line 1 {} // {}\n",
            self.source_mapper.get_num(node),
            path.to_str().unwrap().replace('\\', "\\\\")
        );
        extra_lines.push(line_directive);
        self.unsafe_get_and_insert(merge_list, extra_lines);
    }

    /// Pushes a `#line <line> <id> // <path>` directive resuming the parent
    /// file after an include.
    fn add_closing_line_directive(
        &mut self, line: usize, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
    ) {
        // Optifine doesn't seem to add a leading newline if the previous line was a #line directive
        let line_directive = if let Some(l) = merge_list.back() {
            if l.trim().starts_with("#line") {
                format!(
                    "#line {} {} // {}\n",
                    line,
                    self.source_mapper.get_num(node),
                    path.to_str().unwrap().replace('\\', "\\\\")
                )
            } else {
                format!(
                    "\n#line {} {} // {}\n",
                    line,
                    self.source_mapper.get_num(node),
                    path.to_str().unwrap().replace('\\', "\\\\")
                )
            }
        } else {
            format!(
                "\n#line {} {} // {}\n",
                line,
                self.source_mapper.get_num(node),
                path.to_str().unwrap().replace('\\', "\\\\")
            )
        };
        extra_lines.push(line_directive);
        self.unsafe_get_and_insert(merge_list, extra_lines);
    }

    /// Pushes a view of the last `String` in `extra_lines` onto `merge_list`
    /// via a raw pointer, sidestepping the borrow checker.
    ///
    /// NOTE(review): this presumably relies on the `String`'s heap buffer
    /// staying put even when `extra_lines` reallocates, and on `extra_lines`
    /// outliving `merge_list` — neither is enforced by the compiler here.
    /// Verify before refactoring.
    fn unsafe_get_and_insert(&self, merge_list: &mut LinkedList<&str>, extra_lines: &[String]) {
        // :^)
        unsafe {
            let vec_ptr_offset = extra_lines.as_ptr().add(extra_lines.len() - 1);
            merge_list.push_back(&vec_ptr_offset.as_ref().unwrap()[..]);
        }
    }
}
#[cfg(test)]
mod merge_view_test {
    //! Golden-file tests: each builds an include graph by hand, runs
    //! `MergeViewBuilder`, and compares against a `.merge` truth file whose
    //! `!!` placeholders are substituted with the temp-dir paths in the order
    //! the files appear in the merged output.
    use std::fs;
    use std::path::PathBuf;

    use crate::merge_views::MergeViewBuilder;
    use crate::source_mapper::SourceMapper;
    use crate::test::{copy_to_and_set_root, new_temp_server};
    use crate::IncludePosition;

    // Single include: final.fsh -> common.glsl.
    #[test]
    #[logging_macro::log_scope]
    fn test_generate_merge_list_01() {
        let mut server = new_temp_server(None);

        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/01", &mut server);
        server.endpoint.request_shutdown();

        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
        let common_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("common.glsl"));

        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, common_idx, IncludePosition { line: 2, start: 0, end: 0 });

        let nodes = server.get_dfs_for_node(final_idx).unwrap();
        let sources = server.load_sources(&nodes).unwrap();

        let graph_borrow = server.graph.borrow();
        let mut source_mapper = SourceMapper::new(0);
        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();

        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

        let mut truth = fs::read_to_string(merge_file).unwrap();
        // truth = truth.replacen(
        //     "!!",
        //     &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
        //     1,
        // );
        truth = truth.replacen(
            "!!",
            &tmp_path.join("shaders").join("common.glsl").to_str().unwrap().replace('\\', "\\\\"),
            1,
        );
        truth = truth.replace(
            "!!",
            &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
        );

        assert_eq!(result, truth);
    }

    // Nested includes: final.fsh -> sample.glsl -> {burger.glsl, test.glsl}.
    #[test]
    #[logging_macro::log_scope]
    fn test_generate_merge_list_02() {
        let mut server = new_temp_server(None);

        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/02", &mut server);
        server.endpoint.request_shutdown();

        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
        let test_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
        let burger_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
        let sample_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));

        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 });

        let nodes = server.get_dfs_for_node(final_idx).unwrap();
        let sources = server.load_sources(&nodes).unwrap();

        let graph_borrow = server.graph.borrow();
        let mut source_mapper = SourceMapper::new(0);
        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();

        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

        let mut truth = fs::read_to_string(merge_file).unwrap();

        // truth = truth.replacen(
        //     "!!",
        //     &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
        //     1,
        // );

        // sample.glsl appears three times in the merged output (opening,
        // between its two children, and closing), hence the repeats.
        for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
            let path = tmp_path.clone();
            truth = truth.replacen(
                "!!",
                &path
                    .join("shaders")
                    .join("utils")
                    .join(file)
                    .to_str()
                    .unwrap()
                    .replace('\\', "\\\\"),
                1,
            );
        }

        truth = truth.replacen(
            "!!",
            &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
            1,
        );

        assert_eq!(result, truth);
    }

    // Same topology as 02 but with different fixture contents (testdata/03).
    #[test]
    #[logging_macro::log_scope]
    fn test_generate_merge_list_03() {
        let mut server = new_temp_server(None);

        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/03", &mut server);
        server.endpoint.request_shutdown();

        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
        let test_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
        let burger_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
        let sample_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));

        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 });

        let nodes = server.get_dfs_for_node(final_idx).unwrap();
        let sources = server.load_sources(&nodes).unwrap();

        let graph_borrow = server.graph.borrow();
        let mut source_mapper = SourceMapper::new(0);
        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();

        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

        let mut truth = fs::read_to_string(merge_file).unwrap();

        // truth = truth.replacen(
        //     "!!",
        //     &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
        //     1,
        // );

        for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
            let path = tmp_path.clone();
            truth = truth.replacen(
                "!!",
                &path
                    .join("shaders")
                    .join("utils")
                    .join(file)
                    .to_str()
                    .unwrap()
                    .replace('\\', "\\\\"),
                1,
            );
        }

        truth = truth.replacen(
            "!!",
            &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
            1,
        );

        assert_eq!(result, truth);
    }

    // Two sibling includes in the root plus nested includes in the first one.
    #[test]
    #[logging_macro::log_scope]
    fn test_generate_merge_list_04() {
        let mut server = new_temp_server(None);

        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/04", &mut server);
        server.endpoint.request_shutdown();

        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
        let utilities_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("utilities.glsl"));
        let stuff1_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("stuff1.glsl"));
        let stuff2_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("utils").join("stuff2.glsl"));
        let matrices_idx = server
            .graph
            .borrow_mut()
            .add_node(&tmp_path.join("shaders").join("lib").join("matrices.glsl"));

        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, utilities_idx, IncludePosition { line: 2, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(utilities_idx, stuff1_idx, IncludePosition { line: 0, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(utilities_idx, stuff2_idx, IncludePosition { line: 1, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, matrices_idx, IncludePosition { line: 3, start: 0, end: 0 });

        let nodes = server.get_dfs_for_node(final_idx).unwrap();
        let sources = server.load_sources(&nodes).unwrap();

        let graph_borrow = server.graph.borrow();
        let mut source_mapper = SourceMapper::new(0);
        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();

        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

        let mut truth = fs::read_to_string(merge_file).unwrap();

        for file in &[
            // PathBuf::new().join("final.fsh").to_str().unwrap(),
            PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
            PathBuf::new().join("utils").join("stuff1.glsl").to_str().unwrap(),
            PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
            PathBuf::new().join("utils").join("stuff2.glsl").to_str().unwrap(),
            PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
            PathBuf::new().join("final.fsh").to_str().unwrap(),
            PathBuf::new().join("lib").join("matrices.glsl").to_str().unwrap(),
            PathBuf::new().join("final.fsh").to_str().unwrap(),
        ] {
            let path = tmp_path.clone();
            truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1);
        }

        assert_eq!(result, truth);
    }

    // The same child included twice by the same parent (two edges final -> test).
    #[test]
    #[logging_macro::log_scope]
    fn test_generate_merge_list_06() {
        let mut server = new_temp_server(None);

        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/06", &mut server);
        server.endpoint.request_shutdown();

        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
        let test_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("test.glsl"));

        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, test_idx, IncludePosition { line: 3, start: 0, end: 0 });
        server
            .graph
            .borrow_mut()
            .add_edge(final_idx, test_idx, IncludePosition { line: 5, start: 0, end: 0 });

        let nodes = server.get_dfs_for_node(final_idx).unwrap();
        let sources = server.load_sources(&nodes).unwrap();

        let graph_borrow = server.graph.borrow();
        let mut source_mapper = SourceMapper::new(0);
        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();

        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

        let mut truth = fs::read_to_string(merge_file).unwrap();

        for file in &[
            // PathBuf::new().join("final.fsh").to_str().unwrap(),
            PathBuf::new().join("test.glsl").to_str().unwrap(),
            PathBuf::new().join("final.fsh").to_str().unwrap(),
            PathBuf::new().join("test.glsl").to_str().unwrap(),
            PathBuf::new().join("final.fsh").to_str().unwrap(),
        ] {
            let path = tmp_path.clone();
            truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1);
        }

        assert_eq!(result, truth);
    }
}

View file

@ -1,13 +1,12 @@
use std::{collections::HashMap, fs::read_to_string, path::Path, vec};
use anyhow::Result;
use rust_lsp::lsp_types::{DocumentSymbol, Location, Position, Range, SymbolKind};
use slog_scope::{debug, info, trace};
use logging::{trace, debug, info};
use sourcefile::LineMap;
use tower_lsp::lsp_types::{DocumentSymbol, Location, Position, Range, SymbolKind};
use tree_sitter::{Node, Parser, Point, Query, QueryCursor, Tree};
use url::Url;
use crate::linemap::LineMap;
// Newtype over the (presumably fully-qualified) name of a parsed symbol —
// TODO confirm qualification scheme against `list_document_symbols`.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Default)]
struct SymbolName(String);
@ -53,8 +52,8 @@ impl SymbolName {
}
}
impl slog::Value for SymbolName {
fn serialize(&self, record: &slog::Record, key: slog::Key, serializer: &mut dyn slog::Serializer) -> slog::Result {
impl logging::Value for SymbolName {
fn serialize(&self, record: &logging::Record, key: logging::Key, serializer: &mut dyn logging::Serializer) -> logging::Result {
self.0.serialize(record, key, serializer)
}
}
@ -161,7 +160,7 @@ impl<'a> ParserContext<'a> {
})
}
pub fn list_symbols(&self, _path: &Path) -> Result<Option<Vec<DocumentSymbol>>> {
pub fn list_document_symbols(&self, _path: &Path) -> Result<Option<Vec<DocumentSymbol>>> {
let query = Query::new(tree_sitter_glsl::language(), LIST_SYMBOLS_STR)?;
let mut query_cursor = QueryCursor::new();
@ -222,7 +221,6 @@ impl<'a> ParserContext<'a> {
fqname_to_index.insert(fqname, parent_child_vec.len() - 1);
}
// let mut symbols = vec![];
for i in 1..parent_child_vec.len() {
let (left, right) = parent_child_vec.split_at_mut(i);
let parent = &right[0].0;

View file

@ -1,281 +0,0 @@
use super::*;
use std::fs;
use std::io;
use std::io::Result;
use pretty_assertions::assert_eq;
use tempdir::TempDir;
use fs_extra::{copy_items, dir};
use jsonrpc_common::*;
use jsonrpc_response::*;
/// An `io::Write` adapter that appends a blank line after any write ending
/// in `}`, separating consecutive JSON payloads on the underlying stream.
struct StdoutNewline {
    // The wrapped writer that actually receives the bytes.
    s: Box<dyn io::Write>,
}

impl io::Write for StdoutNewline {
    /// Forwards `buf` to the inner writer. When the payload ends with `}`
    /// (the close of a JSON object) two newlines are written afterwards as a
    /// separator; their result is intentionally ignored (best-effort) and the
    /// returned byte count covers only the original `buf`.
    fn write(&mut self, buf: &[u8]) -> Result<usize> {
        let res = self.s.write(buf);
        // `buf.last()` instead of `buf[buf.len() - 1]`: the latter panics with
        // an index-out-of-bounds when `write` is called with an empty buffer.
        if buf.last() == Some(&b'}') {
            #[allow(unused_variables)]
            let res = self.s.write(b"\n\n");
        }
        res
    }

    fn flush(&mut self) -> Result<()> {
        self.s.flush()
    }
}
/// Builds a `MinecraftShaderLanguageServer` suitable for tests: output is
/// discarded (`io::sink`), the root is empty, and the GL validator is the
/// supplied `opengl_context` or a fresh mock when `None`.
pub fn new_temp_server(opengl_context: Option<Box<dyn opengl::ShaderValidator>>) -> MinecraftShaderLanguageServer {
    let endpoint = LSPEndpoint::create_lsp_output_with_output_stream(|| StdoutNewline { s: Box::new(io::sink()) });

    let context = opengl_context.unwrap_or_else(|| Box::new(opengl::MockShaderValidator::new()));

    MinecraftShaderLanguageServer {
        endpoint,
        graph: Rc::new(RefCell::new(graph::CachedStableGraph::new())),
        root: "".into(),
        command_provider: None,
        opengl_context: context.into(),
        log_guard: None,
        tree_sitter: Rc::new(RefCell::new(Parser::new())),
    }
}
/// Copies every entry of the directory `files` into `<dest>/shaders`.
fn copy_files(files: &str, dest: &TempDir) {
    let copy_opts = dir::CopyOptions::new();
    let entries: Vec<String> = fs::read_dir(files)
        .unwrap()
        .map(|entry| entry.unwrap().path().to_str().unwrap().to_string())
        .collect();
    copy_items(&entries, dest.path().join("shaders"), &copy_opts).unwrap();
}
/// Copies the fixture at `test_path` into a fresh temp dir and points the
/// server's root at it (as a plain path, not a `file://` URL).
pub fn copy_to_and_set_root(test_path: &str, server: &mut MinecraftShaderLanguageServer) -> (Rc<TempDir>, PathBuf) {
    let (tmp_dir, tmp_path) = copy_to_tmp_dir(test_path);
    server.root = tmp_path.clone();
    (tmp_dir, tmp_path)
}
/// Creates a temp dir containing a `shaders/` subdirectory populated from
/// `test_path`, returning the dir handle plus its path.
fn copy_to_tmp_dir(test_path: &str) -> (Rc<TempDir>, PathBuf) {
    let tmp_dir = Rc::new(TempDir::new("mcshader").unwrap());
    fs::create_dir(tmp_dir.path().join("shaders")).unwrap();

    copy_files(test_path, &tmp_dir);

    let tmp_path: PathBuf = tmp_dir.path().to_str().unwrap().into();
    (tmp_dir, tmp_path)
}
// Initializing over an empty workspace should set the root and leave the
// include graph empty.
#[allow(deprecated)]
#[test]
#[logging_macro::log_scope]
fn test_empty_initialize() {
    let mut server = new_temp_server(None);

    let tmp_dir = TempDir::new("mcshader").unwrap();
    let tmp_path = tmp_dir.path();

    let initialize_params = InitializeParams {
        process_id: None,
        root_path: None,
        root_uri: Some(Url::from_directory_path(tmp_path).unwrap()),
        client_info: None,
        initialization_options: None,
        capabilities: ClientCapabilities {
            workspace: None,
            text_document: None,
            experimental: None,
            window: None,
            general: Option::None,
        },
        trace: None,
        workspace_folders: None,
        locale: Option::None,
    };

    // The response must be a success result, not an error.
    let on_response = |resp: Option<Response>| {
        assert!(resp.is_some());
        let respu = resp.unwrap();
        match respu.result_or_error {
            ResponseResult::Result(_) => {}
            ResponseResult::Error(e) => {
                panic!("expected ResponseResult::Result(..), got {:?}", e)
            }
        }
    };

    let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
    server.initialize(initialize_params, completable);

    assert_eq!(server.root, tmp_path);

    assert_eq!(server.graph.borrow().graph.edge_count(), 0);
    assert_eq!(server.graph.borrow().graph.node_count(), 0);

    server.endpoint.request_shutdown();
}
// Initializing over testdata/01 should discover the single
// final.fsh -> common.glsl include edge at line 2.
#[allow(deprecated)]
#[test]
#[logging_macro::log_scope]
fn test_01_initialize() {
    let mut server = new_temp_server(None);

    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("./testdata/01");

    let initialize_params = InitializeParams {
        process_id: None,
        root_path: None,
        root_uri: Some(Url::from_directory_path(tmp_path.clone()).unwrap()),
        client_info: None,
        initialization_options: None,
        capabilities: ClientCapabilities {
            workspace: None,
            text_document: None,
            experimental: None,
            window: None,
            general: Option::None,
        },
        trace: None,
        workspace_folders: None,
        locale: Option::None,
    };

    // The response must be a success result, not an error.
    let on_response = |resp: Option<Response>| {
        assert!(resp.is_some());
        let respu = resp.unwrap();
        match respu.result_or_error {
            ResponseResult::Result(_) => {}
            ResponseResult::Error(e) => {
                panic!("expected ResponseResult::Result(..), got {:?}", e)
            }
        }
    };

    let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
    server.initialize(initialize_params, completable);

    server.endpoint.request_shutdown();

    // Assert there is one edge between two nodes
    assert_eq!(server.graph.borrow().graph.edge_count(), 1);

    let edge = server.graph.borrow().graph.edge_indices().next().unwrap();
    let (node1, node2) = server.graph.borrow().graph.edge_endpoints(edge).unwrap();

    // Assert the values of the two nodes in the tree
    assert_eq!(
        server.graph.borrow().graph[node1],
        //format!("{:?}/{}/{}", tmp_path, "shaders", "final.fsh")
        tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string()
    );
    assert_eq!(
        server.graph.borrow().graph[node2],
        //format!("{:?}/{}/{}", tmp_path, "shaders", "common.glsl")
        tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string()
    );

    assert_eq!(server.graph.borrow().graph.edge_weight(edge).unwrap().line, 2);
}
// Initializing over testdata/05 should discover three edges across four
// nodes, matching the expected (parent, child) path pairs regardless of
// edge iteration order.
#[allow(deprecated)]
#[test]
#[logging_macro::log_scope]
fn test_05_initialize() {
    let mut server = new_temp_server(None);

    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("./testdata/05");

    let initialize_params = InitializeParams {
        process_id: None,
        root_path: None,
        root_uri: Some(Url::from_directory_path(tmp_path.clone()).unwrap()),
        client_info: None,
        initialization_options: None,
        capabilities: ClientCapabilities {
            workspace: None,
            text_document: None,
            experimental: None,
            window: None,
            general: Option::None,
        },
        trace: None,
        workspace_folders: None,
        locale: Option::None,
    };

    // The response must be a success result, not an error.
    let on_response = |resp: Option<Response>| {
        assert!(resp.is_some());
        let respu = resp.unwrap();
        match respu.result_or_error {
            ResponseResult::Result(_) => {}
            ResponseResult::Error(e) => {
                panic!("expected ResponseResult::Result(..), got {:?}", e)
            }
        }
    };

    let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
    server.initialize(initialize_params, completable);

    server.endpoint.request_shutdown();

    // Assert there is one edge between two nodes
    assert_eq!(server.graph.borrow().graph.edge_count(), 3);
    assert_eq!(server.graph.borrow().graph.node_count(), 4);

    // Expected (parent, child) pairs; compared as a set because edge
    // iteration order is not guaranteed.
    let pairs: HashSet<(PathBuf, PathBuf)> = vec![
        (
            tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(),
            tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string().into(),
        ),
        (
            tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(),
            tmp_path
                .join("shaders")
                .join("test")
                .join("banana.glsl")
                .to_str()
                .unwrap()
                .to_string()
                .into(),
        ),
        (
            tmp_path
                .join("shaders")
                .join("test")
                .join("banana.glsl")
                .to_str()
                .unwrap()
                .to_string()
                .into(),
            tmp_path
                .join("shaders")
                .join("test")
                .join("burger.glsl")
                .to_str()
                .unwrap()
                .to_string()
                .into(),
        ),
    ]
    .into_iter()
    .collect();

    for edge in server.graph.borrow().graph.edge_indices() {
        let endpoints = server.graph.borrow().graph.edge_endpoints(edge).unwrap();
        let first = server.graph.borrow().get_node(endpoints.0);
        let second = server.graph.borrow().get_node(endpoints.1);
        let contains = pairs.contains(&(first.clone(), second.clone()));
        assert!(contains, "doesn't contain ({:?}, {:?})", first, second);
    }
}

View file

@ -1,73 +0,0 @@
use std::path::PathBuf;
use slog_scope::trace;
use anyhow::Result;
use path_slash::PathBufExt;
use url::Url;
/// Infallible conversion from a `Url` into the implementing type.
pub trait FromUrl {
    fn from_url(u: Url) -> Self;
}
/// Fallible conversion from a JSON value into the implementing type.
pub trait FromJson {
    fn from_json(v: &serde_json::value::Value) -> Result<Self>
    where
        Self: Sized;
}
impl FromUrl for PathBuf {
    /// Converts a URL to a native path on Windows: strips the leading `/`
    /// (URL paths look like `/C:/...`), percent-decodes, and normalizes
    /// slashes via `from_slash`.
    #[cfg(target_family = "windows")]
    fn from_url(u: Url) -> Self {
        let path = percent_encoding::percent_decode_str(u.path().strip_prefix('/').unwrap())
            .decode_utf8()
            .unwrap();

        trace!("converted win path from url"; "old" => u.as_str(), "new" => path.to_string());

        PathBuf::from_slash(path)
    }

    /// Converts a URL to a native path on Unix: percent-decodes the path
    /// as-is (the leading `/` is part of the absolute path).
    #[cfg(target_family = "unix")]
    fn from_url(u: Url) -> Self {
        let path = percent_encoding::percent_decode_str(u.path()).decode_utf8().unwrap();

        trace!("converted unix path from url"; "old" => u.as_str(), "new" => path.to_string());

        PathBuf::from_slash(path)
    }
}
impl FromJson for PathBuf {
    /// Converts a JSON string value to a native path on Windows.
    ///
    /// Uses `Value::as_str` to obtain the decoded string contents. The previous
    /// approach — `v.to_string()` followed by trimming `"` — re-serialized the
    /// value, leaving JSON escape sequences intact (e.g. a single backslash in
    /// the path became `\\`, and embedded quotes became `\"`), which quote
    /// trimming cannot undo. A leading `/` (URL-style `/C:/...`) is stripped
    /// when present, then the path is percent-decoded and slash-normalized.
    ///
    /// Returns an error when `v` is not a JSON string.
    #[cfg(target_family = "windows")]
    fn from_json(v: &serde_json::value::Value) -> Result<Self>
    where
        Self: Sized,
    {
        let raw = match v.as_str() {
            Some(s) => s,
            None => return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v)),
        };
        let path = percent_encoding::percent_decode_str(raw.strip_prefix('/').unwrap_or(raw)).decode_utf8()?;

        trace!("converted win path from json"; "old" => v.to_string(), "new" => path.to_string());

        Ok(PathBuf::from_slash(path))
    }

    /// Converts a JSON string value to a native path on Unix.
    ///
    /// Uses `Value::as_str` for the decoded contents (see the Windows variant
    /// for why `to_string()` + quote trimming was incorrect), then
    /// percent-decodes and slash-normalizes.
    ///
    /// Returns an error when `v` is not a JSON string.
    #[cfg(target_family = "unix")]
    fn from_json(v: &serde_json::value::Value) -> Result<Self>
    where
        Self: Sized,
    {
        let raw = match v.as_str() {
            Some(s) => s,
            None => return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v)),
        };
        let path = percent_encoding::percent_decode_str(raw).decode_utf8()?;

        trace!("converted unix path from json"; "old" => v.to_string(), "new" => path.to_string());

        Ok(PathBuf::from_slash(path))
    }
}

View file

@ -1,11 +0,0 @@
#version 120
#line 1 1 // !!
float test() {
return 0.5;
}
#line 4 0 // !!
void main() {
gl_FragColor[0] = vec4(0.0);
}

View file

@ -1,27 +0,0 @@
#version 120
#line 1 1 // !!
int sample() {
return 5;
}
#line 1 2 // !!
void burger() {
// sample text
}
#line 6 1 // !!
#line 1 3 // !!
float test() {
return 3.0;
}
#line 8 1 // !!
int sample_more() {
return 5;
}
#line 4 0 // !!
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}

View file

@ -1,23 +0,0 @@
#version 120
#line 1 1 // !!
int sample() {
return 5;
}
#line 1 2 // !!
void burger() {
// sample text
}
#line 6 1 // !!
#line 1 3 // !!
float test() {
return 3.0;
}
#line 8 1 // !!
#line 4 0 // !!
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}

View file

@ -1,23 +0,0 @@
#version 120
#line 1 1 // !!
#line 1 2 // !!
void stuff1() {
}
#line 2 1 // !!
#line 1 3 // !!
void stuff2() {
}
#line 3 1 // !!
#line 4 0 // !!
#line 1 4 // !!
void matrix() {
}
#line 5 0 // !!
void main() {
}

View file

@ -1,17 +0,0 @@
#version 120
#ifdef BANANA
#line 1 1 // !!
int test() {
return 1;
}
#line 5 0 // !!
#else
#line 1 1 // !!
int test() {
return 1;
}
#line 7 0 // !!
#endif
void main() {}

26
server/opengl/Cargo.toml Normal file
View file

@ -0,0 +1,26 @@
[package]
name = "opengl"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
glutin = "0.28"
gl = "0.14"
url = "2.2"
filesystem = { path = "../filesystem" }
graph = { path = "../graph" }
tower-lsp = "0.17.0"
regex = "1.4"
mockall = "0.11"
logging = { path = "../logging" }
sourcefile = { path = "../sourcefile" }
[dev-dependencies]
# workspace = { path = "../workspace" }
logging_macro = { path = "../logging_macro" }
tokio = { version = "1.18", features = ["fs"]}
trim-margin = "0.1"

View file

@ -1,27 +1,25 @@
use std::{collections::HashMap, lazy::OnceCell, path::Path};
use std::collections::HashMap;
use core::cell::OnceCell;
use filesystem::NormalizedPathBuf;
use logging::debug;
use regex::Regex;
use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
use slog_scope::debug;
use tower_lsp::lsp_types::*;
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
use url::Url;
use crate::{
consts,
graph::CachedStableGraph,
opengl,
source_mapper::{SourceMapper, SourceNum},
};
use crate::ShaderValidator;
use sourcefile::{SourceMapper, SourceNum};
pub struct DiagnosticsParser<'a, T: opengl::ShaderValidator + ?Sized> {
line_offset: OnceCell<u32>,
pub struct DiagnosticsParser<'a, T: ShaderValidator + ?Sized> {
// line_offset: OnceCell<u32>,
line_regex: OnceCell<Regex>,
vendor_querier: &'a T,
}
impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
impl<'a, T: ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
pub fn new(vendor_querier: &'a T) -> Self {
DiagnosticsParser {
line_offset: OnceCell::new(),
// line_offset: OnceCell::new(),
line_regex: OnceCell::new(),
vendor_querier,
}
@ -32,20 +30,26 @@ impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
"NVIDIA Corporation" => {
Regex::new(r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#).unwrap()
}
_ => Regex::new(r#"^(?P<severity>ERROR|WARNING): (?P<filepath>[^?<>*|"\n]+):(?P<linenum>\d+): (?:'.*' :|[a-z]+\(#\d+\)) +(?P<output>.+)$"#)
_ => Regex::new(
r#"^(?P<severity>ERROR|WARNING): (?P<filepath>[^?<>*|"\n]+):(?P<linenum>\d+): (?:'.*' :|[a-z]+\(#\d+\)) +(?P<output>.+)$"#,
)
.unwrap(),
})
}
fn get_line_offset(&self) -> u32 {
*self.line_offset.get_or_init(|| match self.vendor_querier.vendor().as_str() {
"ATI Technologies" => 0,
_ => 1,
})
}
// fn get_line_offset(&self) -> u32 {
// *self.line_offset.get_or_init(|| match self.vendor_querier.vendor().as_str() {
// "ATI Technologies" | "ATI Technologies Inc." | "AMD" => 0,
// _ => 1,
// })
// }
pub fn parse_diagnostics_output(
&self, output: String, uri: &Path, source_mapper: &SourceMapper, graph: &CachedStableGraph,
&self,
output: String,
uri: &NormalizedPathBuf,
source_mapper: &SourceMapper<NormalizedPathBuf>,
// graph: &CachedStableGraph<NormalizedPathBuf, IncludeLine>,
) -> HashMap<Url, Vec<Diagnostic>> {
let output_lines = output.split('\n').collect::<Vec<&str>>();
let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::with_capacity(output_lines.len());
@ -65,7 +69,7 @@ impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
let line = match diagnostic_capture.name("linenum") {
Some(c) => c.as_str().parse::<u32>().unwrap_or(0),
None => 0,
} - self.get_line_offset();
};
// TODO: line matching maybe
/* let line_text = source_lines[line as usize];
@ -83,27 +87,22 @@ impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
let origin = match diagnostic_capture.name("filepath") {
Some(o) => {
let source_num: SourceNum = o.as_str().parse::<usize>().unwrap().into();
let graph_node = source_mapper.get_node(source_num);
graph.get_node(graph_node).to_str().unwrap().to_string()
source_mapper.get_node(source_num)
}
None => uri.to_str().unwrap().to_string(),
None => uri,
};
let diagnostic = Diagnostic {
range: Range::new(
/* Position::new(line, leading_whitespace as u64),
Position::new(line, line_text.len() as u64) */
Position::new(line, 0),
Position::new(line, 1000),
Position::new(line-1, 0),
Position::new(line-1, 1000),
),
code: None,
severity: Some(severity),
source: Some(consts::SOURCE.into()),
source: Some("mcglsl".to_string()),
message: msg.trim().into(),
related_information: None,
tags: None,
code_description: Option::None,
data: Option::None,
..Default::default()
};
let origin_url = Url::from_file_path(origin).unwrap();
@ -120,75 +119,68 @@ impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
#[cfg(test)]
mod diagnostics_test {
use std::path::PathBuf;
use slog::slog_o;
use filesystem::NormalizedPathBuf;
use sourcefile::SourceMapper;
use trim_margin::MarginTrimmable;
use url::Url;
use crate::{
diagnostics_parser::DiagnosticsParser, opengl::MockShaderValidator, source_mapper::SourceMapper, test::new_temp_server,
};
use crate::{diagnostics_parser::DiagnosticsParser, MockShaderValidator};
#[test]
#[logging_macro::log_scope]
#[logging_macro::scope]
fn test_nvidia_diagnostics() {
slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "nvidia")), || {
logging::scope(&logging::logger().new(slog_o!("driver" => "nvidia")), || {
let mut mockgl = MockShaderValidator::new();
mockgl.expect_vendor().returning(|| "NVIDIA Corporation".into());
let server = new_temp_server(Some(Box::new(mockgl)));
let output = "0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
#[cfg(target_family = "unix")]
let path: PathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
let path: NormalizedPathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
#[cfg(target_family = "windows")]
let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
let path: NormalizedPathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
let mut source_mapper = SourceMapper::new(0);
source_mapper.get_num(server.graph.borrow_mut().add_node(&path));
source_mapper.get_num(&path);
let parser = DiagnosticsParser::new(server.opengl_context.as_ref());
let parser = DiagnosticsParser::new(&mockgl);
let results =
parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow());
let results = parser.parse_diagnostics_output(output.to_string(), &path.parent().unwrap(), &source_mapper);
assert_eq!(results.len(), 1);
let first = results.into_iter().next().unwrap();
assert_eq!(first.0, Url::from_file_path(path).unwrap());
server.endpoint.request_shutdown();
});
}
#[test]
#[logging_macro::log_scope]
#[logging_macro::scope]
fn test_amd_diagnostics() {
slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "amd")), || {
logging::scope(&logging::logger().new(slog_o!("driver" => "amd")), || {
let mut mockgl = MockShaderValidator::new();
mockgl.expect_vendor().returning(|| "ATI Technologies".into());
let server = new_temp_server(Some(Box::new(mockgl)));
let output = "ERROR: 0:1: '' : syntax error: #line
ERROR: 0:10: '' : syntax error: #line
ERROR: 0:15: 'varying' : syntax error: syntax error
";
let output = r#"
|ERROR: 0:1: '' : syntax error: #line
|ERROR: 0:10: '' : syntax error: #line
|ERROR: 0:15: 'varying' : syntax error: syntax error
"#.trim_margin().unwrap();
#[cfg(target_family = "unix")]
let path: PathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
let path: NormalizedPathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
#[cfg(target_family = "windows")]
let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
let path: NormalizedPathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
let mut source_mapper = SourceMapper::new(0);
source_mapper.get_num(server.graph.borrow_mut().add_node(&path));
source_mapper.get_num(&path);
let parser = DiagnosticsParser::new(server.opengl_context.as_ref());
let parser = DiagnosticsParser::new(&mockgl);
let results =
parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow());
let results = parser.parse_diagnostics_output(output, &path.parent().unwrap(), &source_mapper);
assert_eq!(results.len(), 1);
let first = results.into_iter().next().unwrap();
assert_eq!(first.1.len(), 3);
server.endpoint.request_shutdown();
});
}
}

56
server/opengl/src/lib.rs Normal file
View file

@ -0,0 +1,56 @@
#![feature(once_cell)]
mod opengl_context;
mod opengl_context_facade;
use mockall::automock;
use opengl_context::*;
pub use opengl_context_facade::*;
pub mod diagnostics_parser;
use std::fmt::Debug;
/// Abstraction over an OpenGL-backed GLSL validator.
/// `#[automock]` generates `MockShaderValidator` for tests.
#[automock]
pub trait ShaderValidator {
    /// Compiles `source` as the given shader stage; returns the driver's
    /// output (compile log) when present, `None` otherwise.
    fn validate(&self, tree_type: TreeType, source: &str) -> Option<String>;
    /// Returns the driver's `GL_VENDOR` string.
    fn vendor(&self) -> String;
}
/// Coarse GPU vendor bucket derived from the `GL_VENDOR` string
/// (see the `From<&str>` impl below).
#[derive(Debug, Clone, Copy)]
pub enum GPUVendor {
    NVIDIA, AMD, OTHER // and thats it folks
}
impl From<&str> for GPUVendor {
fn from(s: &str) -> Self {
match s {
"NVIDIA Corporation" => Self::NVIDIA,
"AMD" | "ATI Technologies" | "ATI Technologies Inc." => Self::AMD,
_ => Self::OTHER
}
}
}
/// Shader pipeline stage, derived from a file extension
/// (`fsh`/`vsh`/`gsh`/`csh` — see the `From<&str>` impl below).
#[derive(Debug, Clone, Copy)]
pub enum TreeType {
    Fragment,
    Vertex,
    Geometry,
    Compute,
}
impl From<&str> for TreeType {
    /// Maps a shader file extension to its pipeline stage.
    ///
    /// Panics (via `unreachable!`) on any extension other than
    /// `fsh`/`vsh`/`gsh`/`csh`; callers are expected to pre-filter to
    /// known shader extensions before converting.
    fn from(ext: &str) -> Self {
        match ext {
            "fsh" => TreeType::Fragment,
            "vsh" => TreeType::Vertex,
            "gsh" => TreeType::Geometry,
            "csh" => TreeType::Compute,
            // Previously a bare `unreachable!()`; include the offending
            // extension so a violated precondition is diagnosable.
            other => unreachable!("unexpected shader file extension: {}", other),
        }
    }
}

View file

@ -1,24 +1,18 @@
use std::ffi::{CStr, CString};
use std::ptr;
use slog_scope::info;
use glutin::platform::unix::EventLoopExtUnix;
use logging::info;
#[cfg(test)]
use mockall::automock;
use crate::ShaderValidator;
#[cfg_attr(test, automock)]
pub trait ShaderValidator {
fn validate(&self, tree_type: super::TreeType, source: &str) -> Option<String>;
fn vendor(&self) -> String;
}
pub struct OpenGlContext {
pub(crate) struct Context {
_ctx: glutin::Context<glutin::PossiblyCurrent>,
}
impl OpenGlContext {
pub fn new() -> OpenGlContext {
let events_loop = glutin::event_loop::EventLoop::new();
impl Context {
pub fn default() -> Context {
let events_loop = glutin::event_loop::EventLoop::<()>::new_any_thread();
let gl_window = glutin::ContextBuilder::new()
.build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1))
.unwrap();
@ -29,7 +23,7 @@ impl OpenGlContext {
gl_window
};
let gl_ctx = OpenGlContext { _ctx: gl_window };
let gl_ctx = Context { _ctx: gl_window };
unsafe {
info!(
@ -62,15 +56,13 @@ impl OpenGlContext {
);
info.set_len((info_len - 1) as usize); // ignore null for str::from_utf8
Some(String::from_utf8(info).unwrap())
} else {
None
};
} else { None };
gl::DeleteShader(shader);
result
}
}
impl ShaderValidator for OpenGlContext {
impl ShaderValidator for Context {
fn validate(&self, tree_type: super::TreeType, source: &str) -> Option<String> {
unsafe {
match tree_type {

View file

@ -0,0 +1,87 @@
use std::{
sync::{
mpsc::{self, Receiver, SyncSender},
Arc,
},
thread,
};
use crate::{Context, ShaderValidator};
/// Requests sent from `ContextFacade` to the dedicated OpenGL worker thread.
enum ClientMessage {
    /// Compile-check `source` as the given shader stage.
    Validate { tree_type: crate::TreeType, source: Arc<str> },
    /// Query the driver's `GL_VENDOR` string.
    Vendor,
}
/// Replies sent back from the OpenGL worker thread; each variant pairs
/// with the identically named `ClientMessage` request.
enum ServerMessage {
    /// Validation result: the driver's compile log, or `None`.
    Validate(Option<String>),
    /// The driver's `GL_VENDOR` string.
    Vendor(String),
}
/// Facade over `Context` that keeps the GL context on a dedicated worker
/// thread and proxies requests to it over bounded sync channels
/// (GL contexts are owned by the thread that created them).
pub struct ContextFacade {
    // Pinged once before each request to wake the worker loop for exactly
    // one request/reply cycle.
    start_chan: SyncSender<()>,
    // Request channel into the worker thread.
    client_tx: SyncSender<ClientMessage>,
    // Reply channel out of the worker thread.
    server_rx: Receiver<ServerMessage>,
}
impl ContextFacade {
    /// Spawns the OpenGL worker thread and wires up the channel trio.
    ///
    /// Protocol (order-sensitive): the worker blocks on `start_chan` before
    /// creating the `Context` (so GL init is deferred until first use), then
    /// alternates: receive one `ClientMessage`, send one `ServerMessage`,
    /// block on `start_chan` again. Callers must therefore ping `start_chan`
    /// exactly once before each request.
    pub fn default() -> Self {
        // Capacity 1 on every channel: one request/reply in flight at a time.
        let (client_tx, client_rx) = mpsc::sync_channel::<ClientMessage>(1);
        let (server_tx, server_rx) = mpsc::sync_channel::<ServerMessage>(1);
        let (start_chan, start_chan_recv) = mpsc::sync_channel::<()>(1);
        thread::spawn(move || {
            // Wait for the first ping before creating the GL context on
            // this thread.
            start_chan_recv.recv().unwrap();
            let opengl_ctx = Context::default();
            loop {
                match client_rx.recv() {
                    Ok(msg) => {
                        if let ClientMessage::Validate { tree_type, source } = msg {
                            server_tx
                                .send(ServerMessage::Validate(opengl_ctx.validate(tree_type, &source)))
                                .unwrap();
                        } else {
                            server_tx.send(ServerMessage::Vendor(opengl_ctx.vendor())).unwrap();
                        }
                    }
                    // Request channel closed: the facade was dropped,
                    // shut the worker down.
                    Err(_) => return,
                }
                // Park until the next request cycle is announced.
                start_chan_recv.recv().unwrap();
            }
        });
        ContextFacade {
            start_chan,
            client_tx,
            server_rx,
        }
    }
}
impl ShaderValidator for ContextFacade {
    /// Proxies a validate request to the GL worker thread and blocks for
    /// the reply. Panics if the worker replies with the wrong variant or
    /// has shut down.
    fn validate(&self, tree_type: crate::TreeType, source: &str) -> Option<String> {
        // Wake the worker for exactly one request/reply cycle.
        self.start_chan.send(()).unwrap();
        match self.client_tx.send(ClientMessage::Validate {
            tree_type,
            source: Arc::from(source),
        }) {
            Ok(_) => match self.server_rx.recv().unwrap() {
                ServerMessage::Validate(output) => output,
                ServerMessage::Vendor(_) => panic!("expected validate reply, got vendor"),
            },
            // Fixed: this panic previously said "vendor" even though a
            // Validate message was being sent.
            Err(e) => panic!("error sending validate message: {:?}", e),
        }
    }

    /// Proxies a `GL_VENDOR` query to the GL worker thread and blocks for
    /// the reply.
    fn vendor(&self) -> String {
        self.start_chan.send(()).unwrap();
        match self.client_tx.send(ClientMessage::Vendor) {
            Ok(_) => match self.server_rx.recv().unwrap() {
                ServerMessage::Validate(_) => panic!("expected vendor reply, got validate"),
                ServerMessage::Vendor(resp) => resp,
            },
            Err(e) => panic!("error sending vendor message: {:?}", e),
        }
    }
}

49
server/server/Cargo.toml Normal file
View file

@ -0,0 +1,49 @@
[package]
name = "server"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
serde_json = "1.0"
serde = "1.0"
walkdir = "2.3"
graph = { path = "../graph" }
lazy_static = "1.4"
regex = "1.4"
url = "2.2"
mockall = "0.11"
path-slash = "0.1"
glob = "0.3"
filesystem = { path = "../filesystem" }
# glutin = "0.28"
gl = "0.14"
anyhow = "1.0"
thiserror = "1.0"
tree-sitter = "0.20"
tree-sitter-glsl = "0.1"
logging = { path = "../logging" }
logging_macro = { path = "../logging_macro" }
tower-lsp = "0.17"
# tower-lsp = { path = "../../../Rust/tower-lsp" }
tokio = { version = "1.18", features = ["full"] }
futures = "0.3"
workspace = { path = "../workspace" }
opengl = { path = "../opengl" }
sourcefile = { path = "../sourcefile" }
[dev-dependencies]
tempdir = "0.3"
fs_extra = "1.2"
hamcrest2 = "*"
pretty_assertions = "1.1"
tower-test = "0.4"

View file

@ -0,0 +1,37 @@
use std::fs::OpenOptions;
use std::io::prelude::*;
use filesystem::NormalizedPathBuf;
use graph::{CachedStableGraph, Config, dot};
use logging::info;
// use opengl::IncludePosition;
// use serde_json::Value;
// use anyhow::{format_err, Result};
// pub(crate) fn run(root: &NormalizedPathBuf, graph: &CachedStableGraph<NormalizedPathBuf, IncludePosition>) -> Result<Option<Value>> {
// let filepath = root.join("graph.dot");
// info!("generating dot file"; "path" => &filepath);
// let mut file = OpenOptions::new().truncate(true).write(true).create(true).open(filepath).unwrap();
// let mut write_data_closure = || -> Result<(), std::io::Error> {
// file.seek(std::io::SeekFrom::Start(0))?;
// file.write_all("digraph {\n\tgraph [splines=ortho]\n\tnode [shape=box]\n".as_bytes())?;
// file.write_all(
// dot::Dot::with_config(&graph.graph, &[Config::GraphContentOnly])
// .to_string()
// .as_bytes(),
// )?;
// file.write_all("\n}".as_bytes())?;
// file.flush()?;
// file.seek(std::io::SeekFrom::Start(0))?;
// Ok(())
// };
// match write_data_closure() {
// Err(err) => Err(format_err!("error generating graphviz data: {}", err)),
// _ => Ok(None),
// }
// }

View file

@ -0,0 +1,42 @@
use std::collections::{hash_map::Entry, HashMap};
use filesystem::{LFString, NormalizedPathBuf};
use graph::{dfs, CachedStableGraph};
use logging::{logger, FutureExt};
// use opengl::{merge_views, source_mapper::SourceMapper, IncludePosition};
// use serde_json::Value;
// use anyhow::{format_err, Result};
// pub async fn run(path: &NormalizedPathBuf, graph: &mut CachedStableGraph<NormalizedPathBuf, IncludePosition>) -> Result<Option<Value>> {
// if graph.root_ancestors_for_key(path)?.is_none() {
// return Err(format_err!("'{}' is not a top-level file aka has ancestors", path));
// };
// //info!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::<Vec<String>>());
// // if we are a top-level file (this has to be one of the set defined by Optifine, right?)
// // gather the list of all descendants
// let root = graph.find_node(path).unwrap();
// let mut sources = HashMap::new();
// let tree = dfs::Dfs::new(graph, root)
// .map(|result| {
// let node = result?;
// let path = &graph[node.child];
// if let Entry::Vacant(entry) = sources.entry(path.clone()) {
// let source = futures::executor::block_on(async { LFString::read(path).with_logger(logger()).await })?;
// entry.insert(source);
// };
// Ok(node)
// })
// .collect::<Result<Vec<_>>>()?;
// let mut source_mapper = SourceMapper::new(sources.len());
// let view = merge_views::MergeViewBuilder::new(&tree, &sources, graph, &mut source_mapper).build();
// eprintln!("{:?}", view);
// Ok(Some(serde_json::value::Value::String(view.to_string())))
// }

View file

@ -0,0 +1,3 @@
pub mod graph_dot;
pub mod merged_includes;
pub mod parse_tree;

View file

@ -1,32 +1,17 @@
use std::{
cell::RefCell,
fs,
path::{Path, PathBuf},
rc::Rc,
};
use std::fs;
use anyhow::{format_err, Result};
use filesystem::NormalizedPathBuf;
use logging::warn;
use serde_json::Value;
use slog_scope::warn;
use tree_sitter::{Parser, TreeCursor};
use crate::url_norm::FromJson;
use super::Invokeable;
pub struct TreeSitterSExpr {
pub tree_sitter: Rc<RefCell<Parser>>,
}
impl Invokeable for TreeSitterSExpr {
fn run_command(&self, _: &Path, arguments: &[Value]) -> Result<Value> {
let path = PathBuf::from_json(arguments.get(0).unwrap())?;
warn!("parsing"; "path" => path.to_str().unwrap().to_string());
fn run_command(path: &NormalizedPathBuf, tree_sitter: &mut Parser) -> Result<Value> {
warn!("parsing"; "path" => path);
let source = fs::read_to_string(path)?;
let tree = match self.tree_sitter.borrow_mut().parse(source, None) {
let tree = match tree_sitter.parse(source, None) {
Some(tree) => tree,
None => return Err(format_err!("tree-sitter parsing resulted in no parse tree")),
};
@ -36,7 +21,6 @@ impl Invokeable for TreeSitterSExpr {
let rendered = render_parse_tree(&mut cursor);
Ok(serde_json::value::Value::String(rendered))
}
}
fn render_parse_tree(cursor: &mut TreeCursor) -> String {
@ -76,7 +60,10 @@ fn render_parse_tree(cursor: &mut TreeCursor) -> String {
};
string += (" ".repeat(indent)
+ format!("{}{} [{}, {}] - [{}, {}]\n", field_name, display_name, start.row, start.column, end.row, end.column)
+ format!(
"{}{} [{}, {}] - [{}, {}]\n",
field_name, display_name, start.row, start.column, end.row, end.column
)
.trim_start())
.as_str();
}

5
server/server/src/lib.rs Normal file
View file

@ -0,0 +1,5 @@
#![feature(result_option_inspect)]
pub mod server;
pub use server::*;
mod commands;

880
server/server/src/server.rs Normal file
View file

@ -0,0 +1,880 @@
use std::{collections::HashMap, marker::Sync, sync::Arc};
use filesystem::NormalizedPathBuf;
// use futures::future::join_all;
use logging::{error, info, logger, trace, warn, FutureExt};
use serde_json::Value;
use tokio::sync::Mutex;
// #[cfg(test)]
// use test::Client;
// #[cfg(not(test))]
use tower_lsp::Client;
use tower_lsp::{
jsonrpc::{Error, ErrorCode, Result},
lsp_types::{
notification::{ShowMessage, TelemetryEvent},
*,
},
LanguageServer,
};
use workspace::WorkspaceManager;
// use crate::commands;
/// The tower-lsp language server. `G` is the shader validator
/// implementation and `F` a factory producing validator instances
/// (presumably one per workspace — TODO confirm against `WorkspaceManager`).
pub struct Server<G: 'static, F: 'static>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G,
{
    pub client: Arc<Mutex<Client>>,
    // All workspace state lives behind one async mutex.
    workspace_manager: Arc<Mutex<WorkspaceManager<G, F>>>,
}
impl<G, F> Server<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G,
{
    /// Builds a server around an LSP `client` handle and a validator factory.
    pub fn new(client: Client, gl_factory: F) -> Self {
        Server {
            client: Arc::new(Mutex::new(client)),
            workspace_manager: Arc::new(Mutex::new(WorkspaceManager::new(gl_factory))),
        }
    }

    /// Static capability advertisement returned from `initialize`.
    /// Definition/references/symbols/links are declared but disabled
    /// (OneOf::Left(false) / None) while this restructure is in progress.
    fn capabilities() -> ServerCapabilities {
        ServerCapabilities {
            definition_provider: Some(OneOf::Left(false)),
            references_provider: Some(OneOf::Left(false)),
            document_symbol_provider: Some(OneOf::Left(false)),
            document_link_provider: /* Some(DocumentLinkOptions {
                resolve_provider: None,
                work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
            }), */
            None,
            execute_command_provider: Some(ExecuteCommandOptions {
                commands: vec!["graphDot".into(), "virtualMerge".into(), "parseTree".into()],
                work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
            }),
            // Full-document sync; saves include the full text.
            text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
                open_close: Some(true),
                will_save: None,
                will_save_wait_until: None,
                change: Some(TextDocumentSyncKind::FULL),
                save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })),
            })),
            workspace: Some(WorkspaceServerCapabilities {
                workspace_folders: Some(WorkspaceFoldersServerCapabilities{
                    supported: Some(true),
                    change_notifications: Some(OneOf::Left(false)),
                }),
                file_operations: None,
            }),
            semantic_tokens_provider: Some(
                SemanticTokensOptions {
                    work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
                    legend: SemanticTokensLegend {
                        token_types: vec![SemanticTokenType::COMMENT],
                        token_modifiers: vec![],
                    },
                    range: None,
                    full: Some(SemanticTokensFullOptions::Bool(true)),
                }
                .into(),
            ),
            ..ServerCapabilities::default()
        }
    }

    /// Publishes one diagnostics batch per URL to the client, sequentially.
    /// The `eprintln!`s and "PUBLISHING!" log message are WIP debug output.
    async fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
        let client = self.client.lock().with_logger(logger()).await;
        // let mut handles = Vec::with_capacity(diagnostics.len());
        for (url, diags) in diagnostics {
            eprintln!("publishing to {:?} {:?}", &url, diags);
            /* handles.push( */
            client.publish_diagnostics(url, diags, document_version).with_logger(logger()).await;
            client
                .log_message(MessageType::INFO, "PUBLISHING!")
                .with_logger(logger())
                .await;
            // client.send_notification::<PublishDiagnostics>(PublishDiagnosticsParams {
            //     ri: url,
            //     diagnostics: diags,
            //     // version: document_version,
            //     version: None,
            // }).await/* ) */;
        }
        // join_all(handles).with_logger(logger()).await;
        eprintln!("published")
    }
}
#[tower_lsp::async_trait]
impl<G, F> LanguageServer for Server<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G + Send + Sync,
{
    /// Resolves the workspace root from `root_uri` and scans it for
    /// shader workspaces before returning the capability set.
    /// Rejects clients that are not in a workspace.
    #[logging::with_trace_id]
    async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
        info!("starting server...");
        let capabilities = Server::<G, F>::capabilities();
        let root: NormalizedPathBuf = match params.root_uri {
            Some(uri) => uri.into(),
            None => {
                return Err(Error {
                    code: ErrorCode::InvalidParams,
                    message: "Must be in workspace".into(),
                    data: Some(serde_json::to_value(InitializeError { retry: false }).unwrap()),
                });
            }
        };
        let mut manager = self.workspace_manager.lock().with_logger(logger()).await;
        // self.client
        //     .lock()
        //     .with_logger(logger())
        //     .await
        //     .send_notification::<TelemetryEvent>(serde_json::json!({
        //         "status": "loading",
        //         "message": "Building dependency graph...",
        //         "icon": "$(loading~spin)",
        //     }))
        //     .with_logger(logger())
        //     .await;
        manager.gather_workspaces(&root).with_logger(logger()).await;
        // self.client
        //     .lock()
        //     .with_logger(logger())
        //     .await
        //     .send_notification::<TelemetryEvent>(serde_json::json!({
        //         "status": "ready",
        //         "message": "Project(s) initialized...",
        //         "icon": "$(check)",
        //     }))
        //     .with_logger(logger())
        //     .await;
        Ok(InitializeResult {
            capabilities,
            server_info: None,
        })
    }

    /// No-op; the notification hook is kept for the commented-out
    /// client logging.
    async fn initialized(&self, _: InitializedParams) {
        // self.client
        //     .lock()
        //     .with_logger(logger())
        //     .await
        //     .log_message(MessageType::INFO, "command executed!")
        //     .with_logger(logger())
        //     .await;
    }

    async fn shutdown(&self) -> Result<()> {
        warn!("shutting down language server...");
        Ok(())
    }

    /// On open: refresh the include graph for the file's workspace and
    /// publish lint diagnostics. The "OPENED!" log message and the
    /// telemetry notification are WIP debug output.
    #[logging::with_trace_id]
    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        self.client
            .lock()
            .with_logger(logger())
            .await
            .log_message(MessageType::INFO, "OPENED!")
            .with_logger(logger())
            .await;
        self.client
            .lock()
            .with_logger(logger())
            .await
            .send_notification::<TelemetryEvent>(serde_json::json!({
                "status": "ready",
                "message": "Project(s) initialized...",
                "icon": "$(check)",
            }))
            .with_logger(logger())
            .await;
        info!("opened document"; "uri" => params.text_document.uri.as_str());
        let path: NormalizedPathBuf = params.text_document.uri.into();
        // Files outside any known workspace are silently ignored here
        // (did_save warns instead).
        if let Some(workspace) = self
            .workspace_manager
            .lock()
            .with_logger(logger())
            .await
            .find_workspace_for_file(&path)
        {
            trace!("found workspace"; "root" => &workspace.root);
            workspace.refresh_graph_for_file(&path).with_logger(logger()).await;
            match workspace.lint(&path).with_logger(logger()).await {
                Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
                Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => &path),
            }
        }
    }

    /// On save: same refresh-and-lint flow as `did_open`, but logs a
    /// warning when the file belongs to no workspace.
    #[logging::with_trace_id]
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        info!("saved document"; "uri" => params.text_document.uri.as_str());
        let path: NormalizedPathBuf = params.text_document.uri.into();
        match self
            .workspace_manager
            .lock()
            .with_logger(logger())
            .await
            .find_workspace_for_file(&path)
        {
            Some(workspace) => {
                trace!("found workspace"; "root" => &workspace.root);
                workspace.refresh_graph_for_file(&path).with_logger(logger()).await;
                match workspace.lint(&path).with_logger(logger()).await {
                    Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
                    Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => &path),
                }
            }
            None => warn!("no workspace found"; "path" => path),
        }
    }

    /// Dispatches custom commands. All commands are currently disabled
    /// (commented out), so every invocation reports failure to the client
    /// via ShowMessage; success/failure notification is done through
    /// `inspect`/`inspect_err` on the result.
    #[logging::with_trace_id]
    async fn execute_command(&self, params: ExecuteCommandParams) -> Result<Option<Value>> {
        match params.command.as_str() {
            // "graphDot" => {
            //     let document_path: NormalizedPathBuf = params.arguments.first().unwrap().try_into().unwrap();
            //     let manager = self.workspace_manager.lock().with_logger(logger()).await;
            //     let workspace = manager.find_workspace_for_file(&document_path).unwrap();
            //     let graph = workspace.graph.lock().with_logger(logger()).await;
            //     commands::graph_dot::run(&workspace.root, &graph).map_err(|e| Error {
            //         code: ErrorCode::InternalError,
            //         message: format!("{:?}", e),
            //         data: None,
            //     })
            // }
            // "virtualMerge" => {
            //     let document_path: NormalizedPathBuf = params.arguments.first().unwrap().try_into().unwrap();
            //     let manager = self.workspace_manager.lock().with_logger(logger()).await;
            //     let workspace = manager.find_workspace_for_file(&document_path).unwrap();
            //     let mut graph = workspace.graph.lock().with_logger(logger()).await;
            //     commands::merged_includes::run(&document_path, &mut graph)
            //         .with_logger(logger())
            //         .await
            //         .map_err(|e| Error {
            //             code: ErrorCode::InternalError,
            //             message: format!("{:?}", e),
            //             data: None,
            //         })
            // }
            // "parseTree",
            _ => Err(Error {
                code: ErrorCode::InternalError,
                message: "command doesn't exist".into(),
                data: None,
            }),
        }
        .inspect_err(|e| {
            futures::executor::block_on(async {
                self.client
                    .lock()
                    .with_logger(logger())
                    .await
                    .send_notification::<ShowMessage>(ShowMessageParams {
                        typ: MessageType::ERROR,
                        message: format!("Failed to execute `{}`: {}.", params.command, e),
                    })
                    .with_logger(logger())
                    .await;
            });
        })
        .inspect(|_| {
            futures::executor::block_on(async {
                self.client
                    .lock()
                    .with_logger(logger())
                    .await
                    .send_notification::<ShowMessage>(ShowMessageParams {
                        typ: MessageType::INFO,
                        message: format!("Command `{}` executed successfully.", params.command),
                    })
                    .with_logger(logger())
                    .await;
            });
        })
    }

    /// Disabled during restructure: always returns `Ok(None)`.
    /// The old rust-lsp implementation is preserved in the comment below.
    async fn goto_definition(&self, _params: GotoDefinitionParams) -> Result<Option<GotoDefinitionResponse>> {
        /* logging::slog_with_trace_id(|| {
            let path = PathBuf::from_url(params.text_document.uri);
            if !path.starts_with(&self.root) {
                return;
            }
            let parser = &mut self.tree_sitter.borrow_mut();
            let parser_ctx = match navigation::ParserContext::new(parser, &path) {
                Ok(ctx) => ctx,
                Err(e) => {
                    return completable.complete(Err(MethodError {
                        code: 42069,
                        message: format!("error building parser context: error={}, path={:?}", e, path),
                        data: (),
                    }))
                }
            };
            match parser_ctx.find_definitions(&path, params.position) {
                Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())),
                Err(e) => completable.complete(Err(MethodError {
                    code: 42069,
                    message: format!("error finding definitions: error={}, path={:?}", e, path),
                    data: (),
                })),
            }
        });
        } */
        Ok(None)
    }

    /// Disabled during restructure: always returns `Ok(None)`.
    async fn references(&self, _params: ReferenceParams) -> Result<Option<Vec<Location>>> {
        /* logging::slog_with_trace_id(|| {
            let path = PathBuf::from_url(params.text_document_position.text_document.uri);
            if !path.starts_with(&self.root) {
                return;
            }
            let parser = &mut self.tree_sitter.borrow_mut();
            let parser_ctx = match navigation::ParserContext::new(parser, &path) {
                Ok(ctx) => ctx,
                Err(e) => {
                    return completable.complete(Err(MethodError {
                        code: 42069,
                        message: format!("error building parser context: error={}, path={:?}", e, path),
                        data: (),
                    }))
                }
            };
            match parser_ctx.find_references(&path, params.text_document_position.position) {
                Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())),
                Err(e) => completable.complete(Err(MethodError {
                    code: 42069,
                    message: format!("error finding definitions: error={}, path={:?}", e, path),
                    data: (),
                })),
            }
        }); */
        Ok(None)
    }

    /// Disabled during restructure: always returns `Ok(None)`.
    async fn document_symbol(&self, _params: DocumentSymbolParams) -> Result<Option<DocumentSymbolResponse>> {
        /* logging::slog_with_trace_id(|| {
            let path = PathBuf::from_url(params.text_document.uri);
            if !path.starts_with(&self.root) {
                return;
            }
            let parser = &mut self.tree_sitter.borrow_mut();
            let parser_ctx = match navigation::ParserContext::new(parser, &path) {
                Ok(ctx) => ctx,
                Err(e) => {
                    return completable.complete(Err(MethodError {
                        code: 42069,
                        message: format!("error building parser context: error={}, path={:?}", e, path),
                        data: (),
                    }))
                }
            };
            match parser_ctx.list_document_symbols(&path) {
                Ok(symbols) => completable.complete(Ok(DocumentSymbolResponse::from(symbols.unwrap_or_default()))),
                Err(e) => {
                    return completable.complete(Err(MethodError {
                        code: 42069,
                        message: format!("error finding definitions: error={}, path={:?}", e, path),
                        data: (),
                    }))
                }
            }
        }); */
        Ok(None)
    }

    /// Disabled during restructure: always returns `Ok(None)`.
    async fn document_link(&self, _params: DocumentLinkParams) -> Result<Option<Vec<DocumentLink>>> {
        /* logging::slog_with_trace_id(|| {
            // node for current document
            let curr_doc = PathBuf::from_url(params.text_document.uri);
            let node = match self.graph.borrow_mut().find_node(&curr_doc) {
                Some(n) => n,
                None => {
                    warn!("document not found in graph"; "path" => curr_doc.to_str().unwrap());
                    completable.complete(Ok(vec![]));
                    return;
                }
            };
            let edges: Vec<DocumentLink> = self
                .graph
                .borrow()
                .child_node_indexes(node)
                .filter_map::<Vec<DocumentLink>, _>(|child| {
                    let graph = self.graph.borrow();
                    graph.get_child_positions(node, child).map(|value| {
                        let path = graph.get_node(child);
                        let url = match Url::from_file_path(&path) {
                            Ok(url) => url,
                            Err(e) => {
                                error!("error converting into url"; "path" => path.to_str().unwrap(), "error" => format!("{:?}", e));
                                return None;
                            }
                        };
                        Some(DocumentLink {
                            range: Range::new(
                                Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.start).unwrap()),
                                Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.end).unwrap()),
                            ),
                            target: Some(url.clone()),
                            tooltip: Some(url.path().to_string()),
                            data: None,
                        })
                    }).collect()
                })
                .flatten()
                .collect();
            debug!("document link results";
                "links" => format!("{:?}", edges.iter().map(|e| (e.range, e.target.as_ref().unwrap().path())).collect::<Vec<_>>()),
                "path" => curr_doc.to_str().unwrap(),
            );
            completable.complete(Ok(edges));
        }); */
        Ok(None)
    }

    /// Currently only prints debug output; the previous log-level
    /// reconfiguration logic is preserved in the comment below.
    async fn did_change_configuration(&self, _params: DidChangeConfigurationParams) {
        eprintln!("got notif");
        /* logging::slog_with_trace_id(|| {
            #[derive(Deserialize)]
            struct Configuration {
                #[serde(alias = "logLevel")]
                log_level: String,
            }
            let config: Configuration = from_value(params.settings.as_object().unwrap().get("mcglsl").unwrap().to_owned()).unwrap();
            info!("got updated configuration"; "config" => params.settings.as_object().unwrap().get("mcglsl").unwrap().to_string());
            configuration::handle_log_level_change(config.log_level, |level| {
                self.log_guard = None; // set to None so Drop is invoked
                self.log_guard = Some(logging::set_logger_with_level(level));
            })
        }); */
    }
}
#[allow(unused)]
#[cfg(test)]
mod test {
use std::collections::HashSet;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use filesystem::NormalizedPathBuf;
use logging::warn;
use opengl::MockShaderValidator;
use pretty_assertions::assert_eq;
use serde_json::json;
use serde_json::Value;
use tempdir::TempDir;
use fs_extra::{copy_items, dir};
use tower_lsp::lsp_types::Diagnostic;
use tower_lsp::ClientSocket;
use tower_lsp::LanguageServer;
use tower_lsp::LspService;
use tower_test::mock::Spawn;
use url::Url;
use crate::server::Server;
// No-op stand-in for `tower_lsp::Client` used by the tests: exposes the
// subset of the client API the server calls, but discards every message.
pub struct Client;

impl Client {
    // Accepts and drops any server-to-client notification.
    pub async fn send_notification<N>(&self, _: N::Params)
    where
        N: tower_lsp::lsp_types::notification::Notification,
    {
    }

    // Accepts and drops published diagnostics.
    pub async fn publish_diagnostics(&self, uri: Url, diags: Vec<Diagnostic>, version: Option<i32>) {}
}
// Builds a `Server` wired to the no-op test `Client`, using `gl_factory`
// to produce mock shader validators on demand.
pub fn new_temp_server<F>(gl_factory: F) -> Server<MockShaderValidator, F>
where
    F: Fn() -> MockShaderValidator + Send + Sync,
{
    let noop_client = Client {};
    Server::new(noop_client, gl_factory)
}
// Asserts that calling `$method` (a path such as `Server::initialize`) on
// `$service` with `$request` yields exactly `$response`; the call is awaited,
// so this macro must be used inside an async context.
#[macro_export]
macro_rules! assert_exchange {
    ($service:expr, $request:expr, $response:expr, $method:path) => {
        assert_eq!($method($service, $request).await, $response);
    };
}
// Copies the contents of `test_path` into a fresh temporary directory under
// a `shaders/` subdirectory, returning the `TempDir` guard (dropping it
// deletes the directory) along with the temp directory's path.
// Panics on any I/O failure — acceptable in test setup.
fn copy_to_tmp_dir(test_path: &str) -> (TempDir, PathBuf) {
    let tmp_dir = TempDir::new("mcshader").unwrap();
    fs::create_dir(tmp_dir.path().join("shaders")).unwrap();
    {
        let test_path = Path::new(test_path)
            .canonicalize()
            .unwrap_or_else(|_| panic!("canonicalizing '{}'", test_path));
        let opts = &dir::CopyOptions::new();
        // Collect every entry of the source dir as a string path for fs_extra.
        let files = fs::read_dir(&test_path)
            .unwrap()
            .map(|e| String::from(e.unwrap().path().to_str().unwrap()))
            .collect::<Vec<String>>();
        copy_items(&files, &tmp_dir.path().join("shaders"), opts).unwrap();
    }
    let tmp_path = tmp_dir.path().to_str().unwrap().into();
    (tmp_dir, tmp_path)
}
// Initializing against an empty directory must register zero workspaces.
#[tokio::test]
#[logging_macro::scope]
async fn test_empty_initialize() {
    let mut server = new_temp_server(MockShaderValidator::new);
    let tmp_dir = TempDir::new("mcshader").unwrap();
    let tmp_path = tmp_dir.path();
    let init_req = initialize::request(tmp_path);
    let init_resp = Ok(initialize::response());
    assert_exchange!(&server, init_req, init_resp, Server::initialize);
    assert_eq!(server.workspace_manager.lock().await.workspaces().len(), 0);
}
// Initializing over testdata/01 should discover exactly one workspace,
// rooted at the copied temp directory.
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_01_initialize() {
    let mut server = new_temp_server(MockShaderValidator::new);
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/01");
    let init_req = initialize::request(&tmp_path);
    let init_resp = Ok(initialize::response());
    assert_exchange!(&server, init_req, init_resp, Server::initialize);
    let manager = server.workspace_manager.lock().await;
    let workspaces = manager.workspaces();
    assert_eq!(
        workspaces.iter().map(|w| w.root.to_string()).collect::<Vec<String>>(),
        vec![tmp_path.to_str().unwrap()]
    );
    // Pre-restructure graph-shape assertions, kept for reference until the
    // graph API is re-exposed on Workspace:
    // let workspace = workspaces.first().unwrap();
    // let graph = workspace.graph.lock().await;
    // // Assert there is one edge between two nodes
    // assert_eq!(graph.inner().edge_count(), 1);
    // let edge = graph.inner().edge_indices().next().unwrap();
    // let (node1, node2) = graph.inner().edge_endpoints(edge).unwrap();
    // // Assert the values of the two nodes in the tree
    // assert_eq!(graph.inner()[node1], tmp_path.join("shaders").join("final.fsh").into());
    // assert_eq!(graph.inner()[node2], tmp_path.join("shaders").join("common.glsl").into());
    // assert_eq!(graph.inner().edge_weight(edge).unwrap().line, 2);
}
// Initializing over testdata/05 (nested includes) should still discover a
// single workspace rooted at the copied temp directory.
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
#[logging_macro::scope]
async fn test_05_initialize() {
    let mut server = new_temp_server(MockShaderValidator::new);
    let (_tmp_dir, tmp_path) = copy_to_tmp_dir("../testdata/05");
    let init_req = initialize::request(&tmp_path);
    let init_resp = Ok(initialize::response());
    assert_exchange!(&server, init_req, init_resp, Server::initialize);
    let manager = server.workspace_manager.lock().await;
    let workspaces = manager.workspaces();
    assert_eq!(
        workspaces.iter().map(|w| w.root.to_string()).collect::<Vec<String>>(),
        vec![tmp_path.to_str().unwrap()]
    );
    // Pre-restructure graph-shape assertions, kept for reference until the
    // graph API is re-exposed on Workspace:
    // let workspace = workspaces.first().unwrap();
    // let graph = workspace.graph.lock().await;
    // // Assert there is one edge between two nodes
    // assert_eq!(graph.inner().edge_count(), 3);
    // assert_eq!(graph.inner().node_count(), 4);
    // let pairs: HashSet<(NormalizedPathBuf, NormalizedPathBuf)> = vec![
    //     (
    //         tmp_path.join("shaders").join("final.fsh").into(),
    //         tmp_path.join("shaders").join("common.glsl").into(),
    //     ),
    //     (
    //         tmp_path.join("shaders").join("final.fsh").into(),
    //         tmp_path.join("shaders").join("test").join("banana.glsl").into(),
    //     ),
    //     (
    //         tmp_path.join("shaders").join("test").join("banana.glsl").into(),
    //         tmp_path.join("shaders").join("test").join("burger.glsl").into(),
    //     ),
    // ]
    // .into_iter()
    // .collect();
    // for edge in graph.inner().edge_indices() {
    //     let endpoints = graph.inner().edge_endpoints(edge).unwrap();
    //     let first = &graph[endpoints.0];
    //     let second = &graph[endpoints.1];
    //     let contains = pairs.contains(&(first.clone(), second.clone()));
    //     assert!(contains, "doesn't contain ({:?}, {:?})", first, second);
    // }
}
// Builds typed LSP request params from inline JSON: parses the tokens as a
// `jsonrpc::Request`, then deserializes its `params` field into whatever
// param type the use site expects. Panics if the JSON is not a valid
// request or the params do not deserialize — fine for test fixtures.
#[macro_export]
macro_rules! from_request {
    ($($json:tt)+) => {
        {
            use tower_lsp::jsonrpc;
            use serde_json::{json, Value};
            serde_json::from_value::<jsonrpc::Request>(json!($($json)+))
                .map(|msg| msg.params().unwrap().clone())
                .map(|value| serde_json::from_value(value))
                .unwrap()
                .unwrap()
        }
    };
}
// Counterpart of `from_request!` for responses: parses the tokens as a
// `jsonrpc::Response` and deserializes its `result` field into the type
// expected at the use site. Panics on malformed input — fine for fixtures.
#[macro_export]
macro_rules! from_response {
    ($($json:tt)+) => {
        {
            use tower_lsp::jsonrpc;
            use serde_json::{json, Value};
            serde_json::from_value::<jsonrpc::Response>(json!($($json)+))
                .map(|msg| msg.result().unwrap().clone())
                .map(|value| serde_json::from_value(value))
                .unwrap()
                .unwrap()
        }
    };
}
// Fixture builders for the `exit` notification.
pub mod exit {
    use serde_json::{json, Value};

    // Raw JSON-RPC body of an `exit` notification.
    pub fn notification() -> Value {
        json!({
            "jsonrpc": "2.0",
            "method": "exit",
        })
    }
}
// Fixture builders for the `initialize` request/response exchange.
pub mod initialize {
    use std::path::Path;
    use tower_lsp::{jsonrpc, lsp_types};
    use url::Url;

    // `initialize` request with `rootUri` pointing at `root` and empty
    // client capabilities.
    pub fn request(root: &Path) -> lsp_types::InitializeParams {
        from_request!({
            "jsonrpc": "2.0",
            "method": "initialize",
            "params": {
                "rootUri": Url::from_directory_path(root).unwrap(),
                "capabilities":{},
            },
            "id": 1,
        })
    }

    // Expected `initialize` result: the server's advertised capabilities.
    pub fn response() -> lsp_types::InitializeResult {
        use crate::server::Server;
        use opengl::MockShaderValidator;

        from_response!({
            "jsonrpc": "2.0",
            "result": {
                "capabilities": Server::<MockShaderValidator, fn() -> MockShaderValidator>::capabilities(),
            },
            "id": 1,
        })
    }
}
// Fixture builder for the `initialized` notification.
pub mod initialized {
    use tower_lsp::lsp_types;

    // `initialized` notification with empty params.
    pub fn notification() -> lsp_types::InitializedParams {
        from_request!({
            "jsonrpc": "2.0",
            "method": "initialized",
            "params": {},
        })
    }
}
/* pub mod text_document {
pub mod did_change {
pub mod notification {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn entire<S: AsRef<str>>(uri: &Url, text: S) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/didChange",
"params": {
"textDocument": {
"uri": uri,
},
"contentChanges": [
{
"text": text.as_ref(),
}
],
},
})
}
}
}
pub mod did_close {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn notification(uri: &Url) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/didClose",
"params": {
"textDocument": {
"uri": uri,
},
},
})
}
}
pub mod did_open {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn notification<S: AsRef<str>, T: AsRef<str>>(uri: &Url, language_id: S, version: i64, text: T) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/didOpen",
"params": {
"textDocument": {
"uri": uri,
"languageId": language_id.as_ref(),
"version": version,
"text": text.as_ref(),
},
},
})
}
}
pub mod document_symbol {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn request(uri: &Url) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/documentSymbol",
"params": {
"textDocument": {
"uri": uri,
},
},
"id": 1,
})
}
pub fn response(response: DocumentSymbolResponse) -> Value {
json!({
"jsonrpc": "2.0",
"result": response,
"id": 1,
})
}
}
pub mod hover {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn request(uri: &Url, position: Position) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/hover",
"params": {
"textDocument": {
"uri": uri,
},
"position": position,
},
"id": 1,
})
}
pub fn response() -> Value {
json!({
"jsonrpc": "2.0",
"result": {
},
"id": 1,
})
}
}
pub mod publish_diagnostics {
use serde_json::{json, Value};
use tower_lsp::lsp_types::*;
pub fn notification(uri: &Url, diagnostics: &[Diagnostic]) -> Value {
json!({
"jsonrpc": "2.0",
"method": "textDocument/publishDiagnostics",
"params": {
"uri": uri,
"diagnostics": diagnostics,
},
})
}
}
} */
}

View file

@ -0,0 +1,23 @@
[package]
name = "sourcefile"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
anyhow = "1.0"
tower-lsp = "0.17.0"
tokio = { version = "1.18", features = ["fs"]}
logging = { path = "../logging" }
filesystem = { path = "../filesystem" }
tree-sitter = "0.20.6"
tree-sitter-glsl = "0.1.2"
[dev-dependencies]
trim-margin = "0.1"

View file

@ -0,0 +1,75 @@
#![feature(once_cell)]
mod linemap;
mod source_file;
mod source_mapper;
use std::fmt::{Debug, Display, Formatter};
pub use linemap::*;
use logging::Value;
pub use source_file::*;
pub use source_mapper::*;
// Zero-based line number on which an `#include` directive appears.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct IncludeLine(usize);

impl From<IncludeLine> for usize {
    fn from(n: IncludeLine) -> Self {
        n.0
    }
}

impl From<usize> for IncludeLine {
    fn from(n: usize) -> Self {
        IncludeLine(n)
    }
}

// Offsetting a line number by a count of lines.
impl std::ops::Add<usize> for IncludeLine {
    type Output = IncludeLine;

    fn add(self, rhs: usize) -> Self::Output {
        IncludeLine(self.0 + rhs)
    }
}

// Allows direct comparison against a bare line number in tests/asserts.
impl PartialEq<usize> for IncludeLine {
    fn eq(&self, other: &usize) -> bool {
        self.0 == *other
    }
}

// Lets an IncludeLine be used directly as a structured-logging value,
// serialized as its inner number.
impl Value for IncludeLine {
    fn serialize(&self, record: &logging::Record, key: logging::Key, serializer: &mut dyn logging::Serializer) -> logging::Result {
        self.0.serialize(record, key, serializer)
    }
}

// Debug and Display render identically, e.g. `{line: 3}`.
impl Debug for IncludeLine {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{{line: {}}}", self.0)
    }
}

impl Display for IncludeLine {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(f, "{{line: {}}}", self.0)
    }
}
// GLSL `#version` values recognized by the crate. Each variant's
// discriminant equals the numeric version it represents (110..=460).
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Version {
    Glsl110 = 110,
    Glsl120 = 120,
    Glsl130 = 130,
    Glsl140 = 140,
    Glsl150 = 150,
    Glsl330 = 330,
    Glsl400 = 400,
    Glsl410 = 410,
    Glsl420 = 420,
    Glsl430 = 430,
    Glsl440 = 440,
    Glsl450 = 450,
    Glsl460 = 460,
}

View file

@ -1,4 +1,4 @@
use rust_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Position;
pub struct LineMap {
positions: Vec<usize>,
@ -23,12 +23,12 @@ impl LineMap {
#[cfg(test)]
mod test {
use rust_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Position;
use crate::linemap::LineMap;
#[test]
#[logging_macro::log_scope]
#[logging::scope]
fn test_linemap() {
struct Test {
string: &'static str,

View file

@ -0,0 +1,194 @@
use std::collections::HashMap;
use core::cell::OnceCell;
use anyhow::Result;
use filesystem::NormalizedPathBuf;
use tree_sitter::{Parser, Query, QueryCursor, Tree};
use tree_sitter_glsl::language;
use crate::{linemap::LineMap, IncludeLine, Version};
// Tree-sitter query for the file's `#version` directive: capture 0 is the
// directive token itself, capture 1 is its argument text (e.g. "150 core").
const GET_VERSION: &str = r#"
(translation_unit
(preproc_call
(preproc_directive) @version_str
(preproc_arg) @version_num))
(#match? @version_str "\#version")
"#;

// Tree-sitter query capturing the quoted path of every `#include` directive.
const GET_INCLUDES: &str = r#"
(preproc_include
(string_literal) @include)
"#;
// A single GLSL source file plus lazily-computed derived data.
pub struct SourceFile {
    // Full text of the file.
    pub source: String,
    // Normalized absolute path of this file.
    pub path: NormalizedPathBuf,
    // Workspace root; used to resolve absolute ("/...") include paths.
    root: NormalizedPathBuf,
    // Line-offset table, built on first call to `linemap()`.
    linemap: OnceCell<LineMap>,
    // Tree-sitter parse tree, built on first call to `tree()`.
    tree: OnceCell<Tree>,
    // TODO: use and implement invalidation
    includes: HashMap<NormalizedPathBuf, Vec<IncludeLine>>,
}

// NOTE(review): `core::cell::OnceCell` is not thread-safe, so these manual
// Send/Sync assertions are only sound if a `SourceFile` is never accessed
// from two threads concurrently — confirm how instances are shared before
// relying on this.
unsafe impl Send for SourceFile {}
unsafe impl Sync for SourceFile {}
impl SourceFile {
    // Wraps `source` with its path and the workspace root used to resolve
    // absolute include paths. Derived data (linemap, parse tree) is
    // computed lazily on first use.
    pub fn new<P, R>(source: String, path: P, root: R) -> Self
    where
        P: Into<NormalizedPathBuf>,
        R: Into<NormalizedPathBuf>,
    {
        Self {
            source,
            path: path.into(),
            root: root.into(),
            linemap: OnceCell::new(),
            tree: OnceCell::new(),
            includes: HashMap::new(),
        }
    }

    // Line-offset table for this source, built once on first call.
    pub fn linemap(&self) -> &LineMap {
        self.linemap.get_or_init(|| LineMap::new(&self.source))
    }

    // Extracts the declared `#version` via a tree-sitter query. Only the
    // first whitespace-separated token of the argument is used, so
    // "150 core" yields Glsl150; unrecognized numbers fall back to Glsl110.
    // NOTE(review): the `.next().unwrap()` on the capture iterator panics
    // when the file has no `#version` directive at all — confirm callers
    // guarantee one exists, or this should become an Err.
    pub fn version(&self) -> Result<Version> {
        let query = Query::new(language(), GET_VERSION)?;
        let mut query_cursor = QueryCursor::new();
        // Capture index 1 is @version_num (the directive's argument text).
        let version_num_match = query_cursor
            .captures(&query, self.tree().root_node(), self.source.as_bytes())
            .next()
            .unwrap()
            .0
            .captures[1];
        Ok(
            match version_num_match
                .node
                .utf8_text(self.source.as_bytes())?
                .trim()
                .split(' ')
                .next()
                .unwrap()
            {
                "110" => Version::Glsl110,
                "120" => Version::Glsl120,
                "130" => Version::Glsl130,
                "140" => Version::Glsl140,
                "150" => Version::Glsl150,
                "330" => Version::Glsl330,
                "400" => Version::Glsl400,
                "410" => Version::Glsl410,
                "420" => Version::Glsl420,
                "430" => Version::Glsl430,
                "440" => Version::Glsl440,
                "450" => Version::Glsl450,
                "460" => Version::Glsl460,
                // Unknown or malformed version: default to the oldest.
                _ => Version::Glsl110,
            },
        )
    }

    // Resolves every `#include` in the file to a normalized path plus the
    // zero-based line it appears on. Paths starting with '/' resolve
    // against `<root>/shaders`; relative paths resolve against this file's
    // parent directory.
    pub fn includes(&self) -> Result<Vec<(NormalizedPathBuf, IncludeLine)>> {
        let query = Query::new(language(), GET_INCLUDES)?;
        let mut query_cursor = QueryCursor::new();
        let mut includes = Vec::new();
        for (m, _) in query_cursor.captures(&query, self.tree().root_node(), self.source.as_bytes()) {
            if m.captures.is_empty() {
                continue;
            }
            let include = m.captures[0];
            let include_str = {
                // Strip the surrounding quotes from the string literal.
                let mut string = include.node.utf8_text(self.source.as_bytes()).unwrap();
                string = &string[1..string.len() - 1];
                if string.starts_with('/') {
                    self.root.join("shaders").join(string.strip_prefix('/').unwrap())
                } else {
                    self.path.parent().unwrap().join(string)
                }
            };
            includes.push((include_str, IncludeLine(include.node.start_position().row)));
        }
        Ok(includes)
    }

    // Line numbers at which `child` is included by this file (re-runs the
    // full include query and filters; see the invalidation TODO above).
    pub fn includes_of_path<'a>(&'a self, child: &'a NormalizedPathBuf) -> Result<impl Iterator<Item = IncludeLine> + '_> {
        Ok(self.includes()?.into_iter().filter(move |(p, _)| p == child).map(|(_, l)| l))
    }

    // Parses (once) and returns the tree-sitter syntax tree for this source.
    fn tree(&self) -> &Tree {
        self.tree.get_or_init(|| {
            let mut parser = Parser::new();
            parser.set_language(language()).unwrap();
            parser.parse(&self.source, None).unwrap()
        })
    }
}
#[cfg(test)]
mod test {
    use crate::{IncludeLine, SourceFile, Version};
    use anyhow::Result;
    use trim_margin::MarginTrimmable;

    // `#version 150 core` parses as Glsl150 (the profile token is ignored).
    #[test]
    fn test_versions() {
        const SOURCE: &str = r#"
#version 150 core
void main() {}
"#;
        let source = SourceFile::new(SOURCE.to_string(), "/asdf", "/");
        assert_eq!(source.version().unwrap(), Version::Glsl150);
    }

    // Relative includes resolve against the including file's directory;
    // absolute ("/...") includes resolve against `<root>/shaders`.
    // Line numbers are zero-based.
    #[test]
    fn test_includes() -> Result<()> {
        let source = r#"
|#version 330
|
|#include "path/to/banana.fsh"
| #include "/path/to/badbanana.gsh"
"#
        .trim_margin()
        .unwrap();
        let source = SourceFile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        assert_eq!(
            source.includes()?,
            vec![
                ("/myshader/shaders/world0/path/to/banana.fsh".into(), IncludeLine(2)),
                ("/myshader/shaders/path/to/badbanana.gsh".into(), IncludeLine(3))
            ]
        );
        Ok(())
    }

    // `includes_of_path` filters the include list down to one child path.
    #[test]
    fn test_single_includes() -> Result<()> {
        let source = r#"
|#version 330
|
|#include "path/to/banana.fsh"
| #include "/path/to/badbanana.gsh"
"#
        .trim_margin()
        .unwrap();
        let source = SourceFile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        assert_eq!(
            source.includes_of_path(&"/myshader/shaders/world0/path/to/banana.fsh".into())?.collect::<Vec<_>>(),
            vec![IncludeLine(2)]
        );
        Ok(())
    }
}

View file

@ -1,6 +1,4 @@
use std::{collections::HashMap, fmt::Display};
use petgraph::graph::NodeIndex;
use std::{cmp::Eq, collections::HashMap, fmt::Display, hash::Hash};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceNum(usize);
@ -17,17 +15,19 @@ impl From<usize> for SourceNum {
}
}
// Maps from a graph node index to a virtual OpenGL
// source number (for when building the merged source view),
// Maps from a key to a virtual OpenGL source number (for when building the merged source view),
// and in reverse (for when mapping from GLSL error source numbers to their source path).
// What is a source number: https://community.khronos.org/t/what-is-source-string-number/70976
pub struct SourceMapper {
pub struct SourceMapper<T> {
next: SourceNum,
mapping: HashMap<NodeIndex, SourceNum>,
reverse_mapping: Vec<NodeIndex>,
mapping: HashMap<T, SourceNum>,
reverse_mapping: Vec<T>,
}
impl SourceMapper {
impl<T> SourceMapper<T>
where
T: Eq + Hash + Clone,
{
pub fn new(capacity: usize) -> Self {
SourceMapper {
next: SourceNum(0),
@ -36,17 +36,17 @@ impl SourceMapper {
}
}
pub fn get_num(&mut self, node: NodeIndex) -> SourceNum {
let num = &*self.mapping.entry(node).or_insert_with(|| {
pub fn get_num(&mut self, node: &T) -> SourceNum {
let num = &*self.mapping.entry(node.clone()).or_insert_with(|| {
let next = self.next;
self.next.0 += 1;
self.reverse_mapping.push(node);
self.reverse_mapping.push(node.clone());
next
});
*num
}
pub fn get_node(&self, num: SourceNum) -> NodeIndex {
self.reverse_mapping[num.0]
pub fn get_node(&self, num: SourceNum) -> &T {
&self.reverse_mapping[num.0]
}
}

25
server/testdata/01/final.fsh.merge vendored Normal file
View file

@ -0,0 +1,25 @@
#version 120
#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING
#line 1 0 // !!
#line 0 1 // !!
float test() {
return 0.5;
}
#line 3 0 // !!
void main() {
gl_FragColor[0] = vec4(0.0);
}

0
server/testdata/01/shaders.properties vendored Normal file
View file

41
server/testdata/02/final.fsh.merge vendored Normal file
View file

@ -0,0 +1,41 @@
#version 120
#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING
#line 1 0 // !!
#line 0 1 // !!
int sample() {
return 5;
}
#line 0 2 // !!
void burger() {
// sample text
}
#line 5 1 // !!
#line 0 3 // !!
float test() {
return 3.0;
}
#line 7 1 // !!
int sample_more() {
return 5;
}
#line 3 0 // !!
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}

0
server/testdata/02/shaders.properties vendored Normal file
View file

37
server/testdata/03/final.fsh.merge vendored Normal file
View file

@ -0,0 +1,37 @@
#version 120
#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING
#line 1 0 // !!
#line 0 1 // !!
int sample() {
return 5;
}
#line 0 2 // !!
void burger() {
// sample text
}
#line 5 1 // !!
#line 0 3 // !!
float test() {
return 3.0;
}
#line 7 1 // !!
#line 3 0 // !!
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}

0
server/testdata/03/shaders.properties vendored Normal file
View file

View file

@ -1,7 +1,7 @@
#version 120
#include "/utils/utilities.glsl"
#include "/utils/matricies.glsl"
#include "/lib/matrices.glsl"
void main() {

37
server/testdata/04/final.fsh.merge vendored Normal file
View file

@ -0,0 +1,37 @@
#version 120
#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING
#line 1 0 // !!
#line 0 1 // !!
#line 0 2 // !!
void stuff1() {
}
#line 1 1 // !!
#line 0 3 // !!
void stuff2() {
}
#line 2 1 // !!
#line 3 0 // !!
#line 0 4 // !!
void matrix() {
}
#line 4 0 // !!
void main() {
}

0
server/testdata/04/shaders.properties vendored Normal file
View file

0
server/testdata/05/shaders.properties vendored Normal file
View file

31
server/testdata/06/final.fsh.merge vendored Normal file
View file

@ -0,0 +1,31 @@
#version 120
#define MC_VERSION 11800
#define MC_GL_VERSION 320
#define MC_GLSL_VERSION 150
#define MC_OS_LINUX
#define MC_GL_VENDOR_NVIDIA
#define MC_GL_RENDERER_GEFORCE
#define MC_NORMAL_MAP
#define MC_SPECULAR_MAP
#define MC_RENDER_QUALITY 1.0
#define MC_SHADOW_QUALITY 1.0
#define MC_HAND_DEPTH 0.125
#define MC_OLD_HAND_LIGHT
#define MC_OLD_LIGHTING
#line 1 0 // !!
#ifdef BANANA
#line 0 1 // !!
int test() {
return 1;
}
#line 4 0 // !!
#else
#line 0 1 // !!
int test() {
return 1;
}
#line 6 0 // !!
#endif
void main() {}

0
server/testdata/06/shaders.properties vendored Normal file
View file

View file

@ -0,0 +1,29 @@
[package]
name = "workspace"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
anyhow = "1.0"
filesystem = { path = "../filesystem" }
futures = "0.3.21"
glob = "0.3"
graph = { path = "../graph" }
# include_merger = { path = "../include_merger" }
lazy_static = "1.4"
logging = { path = "../logging" }
opengl = { path = "../opengl" }
path-slash = "0.1"
regex = "1.4"
sourcefile = { path = "../sourcefile" }
tokio = { version = "1.18.0", features = ["sync"] }
tower-lsp = "0.17.0"
tst = "0.10"
url = "2.2"
walkdir = "2.3"
workspace_tree = { path = "../workspace_tree" }
include_merger = { path = "../include_merger" }

View file

@ -0,0 +1,6 @@
#![feature(assert_matches)]
pub mod workspace;
pub mod workspace_manager;
pub use workspace::*;
pub use workspace_manager::*;

Some files were not shown because too many files have changed in this diff Show more