Skip to content

Commit

Permalink
Merge pull request #539 from shengyfu/master
Browse files Browse the repository at this point in the history
Enable pyspark development in vscode
  • Loading branch information
DonJayamanne authored Nov 21, 2016
2 parents 6a5cf63 + 43e6810 commit b376060
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 2 deletions.
26 changes: 25 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,11 @@
"title": "Select Workspace Interpreter",
"category": "Python"
},
{
"command": "python.updateSparkLibrary",
"title": "Update Workspace PySpark Libraries",
"category": "Python"
},
{
"command": "python.refactorExtractVariable",
"title": "Extract Variable",
Expand Down Expand Up @@ -402,7 +407,21 @@
"stopOnEntry": true,
"pythonPath": "${config.python.pythonPath}",
"program": "${file}",
"cwd": "null",
"cwd": "${workspaceRoot}",
"debugOptions": [
"WaitOnAbnormalExit",
"WaitOnNormalExit",
"RedirectOutput"
]
},
{
"name": "PySpark",
"type": "python",
"request": "launch",
"stopOnEntry": true,
"pythonPath": "${config.python.pysparkPath}",
"program": "${file}",
"cwd": "${workspaceRoot}",
"debugOptions": [
"WaitOnAbnormalExit",
"WaitOnNormalExit",
Expand Down Expand Up @@ -548,6 +567,11 @@
"default": "python",
"description": "Path to Python, you can use a custom version of Python by modifying this setting to include the full path."
},
"python.pysparkPath": {
"type": "string",
"default": "${env.SPARK_HOME}/bin/spark-submit",
"description": "Path to spark-submit executable, you can use a custom version of Spark by modifying this setting to include the full path."
},
"python.jediPath": {
"type": "string",
"default": "",
Expand Down
2 changes: 1 addition & 1 deletion src/client/common/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ export namespace Commands {
export const Tests_Select_And_Debug_Method = 'python.selectAndDebugTestMethod';
export const Refactor_Extract_Variable = 'python.refactorExtractVariable';
export const Refaactor_Extract_Method = 'python.refactorExtractMethod';
export const Update_SparkLibrary = 'python.updateSparkLibrary';
export const Build_Workspace_Symbols = 'python.buildWorkspaceSymbols';

export namespace Jupyter {
export const Get_All_KernelSpecs_For_Language = 'jupyter:getAllKernelSpecsForLanguage';
export const Get_All_KernelSpecs = 'jupyter:getAllKernelSpecs';
Expand Down
3 changes: 3 additions & 0 deletions src/client/extension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,13 @@ import { activateExecInTerminalProvider } from './providers/execInTerminalProvid
import * as tests from './unittests/main';
import * as jup from './jupyter/main';
import { HelpProvider } from './helpProvider';
import { activateUpdateSparkLibraryProvider } from './providers/updateSparkLibraryProvider';
import { activateFormatOnSaveProvider } from './providers/formatOnSaveProvider';
import { WorkspaceSymbols } from './workspaceSymbols/main';
import { BlockFormatProviders } from './typeFormatters/blockFormatProvider';
import * as os from 'os';


const PYTHON: vscode.DocumentFilter = { language: 'python', scheme: 'file' };
let unitTestOutChannel: vscode.OutputChannel;
let formatOutChannel: vscode.OutputChannel;
Expand All @@ -51,6 +53,7 @@ export function activate(context: vscode.ExtensionContext) {
sortImports.activate(context, formatOutChannel);
context.subscriptions.push(activateSetInterpreterProvider());
context.subscriptions.push(...activateExecInTerminalProvider());
context.subscriptions.push(activateUpdateSparkLibraryProvider());
activateSimplePythonRefactorProvider(context, formatOutChannel);
context.subscriptions.push(activateFormatOnSaveProvider(PYTHON, settings.PythonSettings.getInstance(), formatOutChannel, vscode.workspace.rootPath));

Expand Down
34 changes: 34 additions & 0 deletions src/client/providers/updateSparkLibraryProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"use strict";
import { Commands } from '../common/constants';
import * as vscode from "vscode";
import * as path from 'path';
import { IS_WINDOWS } from '../common/utils';

/**
 * Registers the "python.updateSparkLibrary" command and returns its
 * disposable so the caller can push it onto the extension's subscriptions.
 */
export function activateUpdateSparkLibraryProvider(): vscode.Disposable {
    console.log('Register command python.updateSparkLibrary');
    const commandRegistration = vscode.commands.registerCommand(Commands.Update_SparkLibrary, updateSparkLibrary);
    return commandRegistration;
}

/**
 * Points `python.autoComplete.extraPaths` at the PySpark sources under
 * SPARK_HOME so IntelliSense can resolve `pyspark` imports, and on Windows
 * additionally rewrites `python.pysparkPath` to the spark-submit.cmd batch
 * wrapper. The literal '${env.SPARK_HOME}' placeholder is stored verbatim
 * in settings; VS Code substitutes it when the value is read.
 * Both configuration writes are fire-and-forget: failures surface as an
 * error message plus a console log, never as a thrown exception.
 */
function updateSparkLibrary() {
    const pythonConfig = vscode.workspace.getConfiguration('python');
    const extraLibPath = 'autoComplete.extraPaths';
    // Kept as a placeholder string on purpose — resolved by VS Code, not here.
    let sparkHomePath = '${env.SPARK_HOME}';
    const sparkLibraries = [
        path.join(sparkHomePath, 'python'),
        path.join(sparkHomePath, 'python/pyspark')
    ];
    pythonConfig.update(extraLibPath, sparkLibraries).then(
        () => {
            // Success: nothing further to do.
        },
        reason => {
            vscode.window.showErrorMessage(`Failed to update ${extraLibPath}. Error: ${reason.message}`);
            console.error(reason);
        });
    if (IS_WINDOWS) {
        // spark-submit has no extension-less launcher on Windows; point the
        // setting at the .cmd wrapper instead.
        const pysparkPath = 'pysparkPath';
        console.log('Overriding ' + pysparkPath);
        const sparkSubmitCmd = path.join(sparkHomePath, 'bin', 'spark-submit.cmd');
        pythonConfig.update(pysparkPath, sparkSubmitCmd).then(
            () => {
                // Success: nothing further to do.
            },
            reason => {
                vscode.window.showErrorMessage(`Failed to update ${pysparkPath}. Error: ${reason.message}`);
                console.error(reason);
            });
    }
    vscode.window.showInformationMessage(`Make sure you have SPARK_HOME environment variable set to the root path of the local spark installation!`);
}

0 comments on commit b376060

Please sign in to comment.