✨ Add toolbar dropdown for remote run options #164

Merged · 27 commits · Jul 5, 2022
Changes from 20 commits
131 changes: 130 additions & 1 deletion .xircuits/config.ini
@@ -3,4 +3,133 @@ BASE_PATH = xai_components

[SERVER]
IP_ADD = http://127.0.0.1
PORT = 5000

[REMOTE_EXECUTION]
# Main run types for remote execution via the subprocess module (e.g. spark-submit)
# These run types will be shown in the toolbar dropdown
# Separate each run type with a newline
# Uncomment 'TEST' to add a new run type
RUN_TYPES = SPARK
; TEST

[RUN_TYPES]
# The configurations listed here will be shown for each run type
# Separate each run type and list each of its run configurations on a new line
SPARK = CPU
GPU
VE

# Uncomment below to add configs to the TEST run type
; TEST = EG
; EG2

[CONFIGURATION]
# Define a config section for each configuration listed under the run types above
# Make sure each section name matches a name listed above (the name inside the brackets)
# Note: Create a unique name for each config type
# Note: Make sure every field is filled
[EG]
name = EG
command = Testing
msg = Testing
url = http://localhost:8088/

[EG2]
name = EG2
command = Testing2
msg = Testing2
url = http://localhost:8088/

[CPU]
name = CPU
command = $SPARK_HOME/bin/spark-submit \
--py-files venv_pyspark_cyclone.zip \
--archives venv_pyspark_cyclone.zip \
--master yarn \
--conf spark.yarn.appMasterEnv.LD_LIBRARY_PATH='/usr/local/cuda-11.2/targets/x86_64-linux/lib/:$LD_LIBRARY_PATH' \
--num-executors=8 --executor-cores=1 --executor-memory=10G --driver-memory=10G \
--name CPU_CPU_mode \
--deploy-mode cluster \
--conf spark.rpc.message.maxSize=1024 \
--conf spark.driver.maxResultSize=10G
msg = Running Spark Submit using CPU
url = http://localhost:8088/

[GPU]
name = GPU
command = $SPARK_HOME/bin/spark-submit \
--py-files venv_pyspark_cyclone.zip \
--archives venv_pyspark_cyclone.zip \
--master yarn \
--conf spark.yarn.appMasterEnv.LD_LIBRARY_PATH='/usr/local/cuda-11.2/targets/x86_64-linux/lib/:$LD_LIBRARY_PATH' \
--num-executors=16 --executor-cores=1 --executor-memory=7G --driver-memory=8G \
--name VE_GPU_mode \
--deploy-mode cluster \
--jars /opt/cyclone/spark-cyclone-sql-plugin.jar \
--conf spark.executor.extraClassPath=/opt/cyclone/spark-cyclone-sql-plugin.jar \
--conf spark.plugins=com.nec.spark.AuroraSqlPlugin \
--conf spark.com.nec.spark.kernel.directory=/opt/spark/work/cyclone \
--conf spark.sql.columnVector.offheap.enabled=true \
--conf spark.executor.resource.ve.amount=2 \
--conf spark.executor.resource.ve.discoveryScript=/opt/spark/getVEsResources.sh \
--conf spark.executorEnv.VE_OMP_NUM_THREADS=1 \
--conf spark.rpc.message.maxSize=1024 \
--conf spark.driver.maxResultSize=4G \
--conf spark.locality.wait=0 \
--conf spark.com.nec.spark.aggregate-on-ve=false \
--conf spark.com.nec.spark.sort-on-ve=true \
--conf spark.com.nec.spark.project-on-ve=false \
--conf spark.com.nec.spark.filter-on-ve=true \
--conf spark.com.nec.spark.exchange-on-ve=true \
--conf spark.com.nec.spark.join-on-ve=true \
--conf spark.com.nec.spark.pass-through-project=false \
--conf spark.com.nec.spark.fail-fast=false \
--conf spark.sql.adaptive.enabled=true \
--conf spark.sql.adaptive.coalescePartitions.enabled=true \
--conf spark.com.nec.spark.amplify-batches=true \
--conf spark.com.nec.spark.ve.columnBatchSize=512000 \
--conf spark.com.nec.spark.ve.targetBatchSizeMb=256 \
--conf spark.sql.inMemoryColumnarStorage.batchSize=512000
msg = Running Spark Submit using GPU
url = http://localhost:8088/

[VE]
name = VE
command = $SPARK_HOME/bin/spark-submit \
--py-files venv_TF_VE.zip \
--archives venv_TF_VE.zip \
--master yarn \
--conf spark.yarn.appMasterEnv.LD_LIBRARY_PATH='/usr/local/cuda-11.2/targets/x86_64-linux/lib/:$LD_LIBRARY_PATH' \
--num-executors=8 --executor-cores=1 --executor-memory=7G --driver-memory=8G \
--name VE_VE_mode \
--deploy-mode cluster \
--jars /opt/cyclone/spark-cyclone-sql-plugin.jar \
--conf spark.yarn.appMasterEnv.PYSPARK_PYTHON='/usr/local/bin/python3.8' \
--conf spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON='/usr/local/bin/python3.8' \
--conf spark.executor.extraClassPath=/opt/cyclone/spark-cyclone-sql-plugin.jar \
--conf spark.plugins=com.nec.spark.AuroraSqlPlugin \
--conf spark.com.nec.spark.kernel.directory=/opt/spark/work/cyclone \
--conf spark.sql.columnVector.offheap.enabled=true \
--conf spark.executor.resource.ve.amount=1 \
--conf spark.executor.resource.ve.discoveryScript=/opt/spark/getVEsResources.sh \
--conf spark.executorEnv.VE_OMP_NUM_THREADS=1 \
--conf spark.rpc.message.maxSize=1024 \
--conf spark.driver.maxResultSize=4G \
--conf spark.locality.wait=0 \
--conf spark.com.nec.spark.aggregate-on-ve=false \
--conf spark.com.nec.spark.sort-on-ve=true \
--conf spark.com.nec.spark.project-on-ve=false \
--conf spark.com.nec.spark.filter-on-ve=true \
--conf spark.com.nec.spark.exchange-on-ve=true \
--conf spark.com.nec.spark.join-on-ve=true \
--conf spark.com.nec.spark.pass-through-project=false \
--conf spark.com.nec.spark.fail-fast=false \
--conf spark.sql.adaptive.enabled=true \
--conf spark.sql.adaptive.coalescePartitions.enabled=true \
--conf spark.com.nec.spark.amplify-batches=true \
--conf spark.com.nec.spark.ve.columnBatchSize=512000 \
--conf spark.com.nec.spark.ve.targetBatchSizeMb=256 \
--conf spark.sql.inMemoryColumnarStorage.batchSize=512000
msg = Running Spark Submit using VE
url = http://localhost:8088/
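Putting the comments above together, registering a new run type end-to-end takes three steps: list it under RUN_TYPES in [REMOTE_EXECUTION], map it to its config names under [RUN_TYPES], and give each config its own section with all four fields filled. A minimal sketch using the TEST/EG placeholders already present in the file (the echo command is an illustrative stand-in, not from this PR):

[REMOTE_EXECUTION]
RUN_TYPES = SPARK
    TEST

[RUN_TYPES]
SPARK = CPU
    GPU
    VE
TEST = EG

[EG]
name = EG
; illustrative placeholder command, not part of this PR
command = echo "TEST run executed"
msg = Running TEST example
url = http://localhost:8088/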
10 changes: 5 additions & 5 deletions src/components/RunSwitcher.tsx
@@ -39,9 +39,9 @@ export class RunSwitcher extends ReactWidget {
aria-label={'Run type'}
title={'Select the run type'}
>
-<option value="run" >Run</option>
-<option value="run-dont-compile">Run w/o Compile</option>
-<option value="spark-submit">Spark Submit</option>
+<option value="run" >Local Run</option>
+<option value="run-dont-compile">Local Run w/o Compile</option>
+<option value="remote-run">Remote Run</option>
</HTMLSelect>
);
}
@@ -53,9 +53,9 @@ export class RunSwitcher extends ReactWidget {
aria-label={'Run type'}
title={'Select the run type'}
>
-<option value="run" >Run</option>
+<option value="run" >Local Run</option>
<option value="run-dont-compile">Run w/o Compile</option>
-<option value="spark-submit">Spark Submit</option>
+<option value="remote-run">Remote Run</option>
</HTMLSelect>
);
}}
66 changes: 46 additions & 20 deletions src/components/xircuitBodyWidget.tsx
@@ -161,7 +161,8 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
const [displaySavedAndCompiled, setDisplaySavedAndCompiled] = useState(false);
const [displayDebug, setDisplayDebug] = useState(false);
const [displayHyperparameter, setDisplayHyperparameter] = useState(false);
-const [sparkSubmitNodes, setSparkSubmitkNodes] = useState<string>("");
+const [runConfigs, setRunConfigs] = useState<any>("");
+const [lastConfig, setLastConfigs] = useState<any>("");
const [stringNodes, setStringNodes] = useState<string[]>(["experiment name"]);
const [intNodes, setIntNodes] = useState<string[]>([]);
const [floatNodes, setFloatNodes] = useState<string[]>([]);
@@ -178,7 +179,7 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
const [inDebugMode, setInDebugMode] = useState<boolean>(false);
const [currentIndex, setCurrentIndex] = useState<number>(-1);
const [runType, setRunType] = useState<string>("run");
-const [addedArgSparkSubmit, setAddedArgSparkSubmit] = useState<string>("");
+const [runTypesCfg, setRunTypesCfg] = useState<string>("");
const xircuitLogger = new Log(app);
const contextRef = useRef(context);
const notInitialRender = useRef(false);
@@ -889,10 +890,10 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
context.ready.then(async () => {
let runArgs = await handleRunDialog();
let runCommand = runArgs["commandStr"];
-let addArgsSparkSubmit = runArgs["addArgs"];
+let config = runArgs["config"];

if (runArgs) {
-commands.execute(commandIDs.executeToOutputPanel, { runCommand, runType, addArgsSparkSubmit });
+commands.execute(commandIDs.executeToOutputPanel, { runCommand, runType, config });
}
})
}
@@ -996,19 +997,19 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
}
};

-async function getConfig(request: string) {
+async function getRunTypesFromConfig(request: string) {
const dataToSend = { "config_request": request };

try {
-const server_reply = await requestAPI<any>('get/config', {
+const server_reply = await requestAPI<any>('config/run', {
body: JSON.stringify(dataToSend),
method: 'POST',
});

return server_reply;
} catch (reason) {
console.error(
-`Error on POST get/config ${dataToSend}.\n${reason}`
+`Error on POST config/run ${dataToSend}.\n${reason}`
);
}
};
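For reference, this is the reply shape the new config/run endpoint appears to return, inferred from how getRunTypeFromConfig and handleRunDialog consume it below; the interface names are illustrative and the exact server contract is an assumption:

// Sketch of the config/run reply, inferred from the consuming code.
// Interface names are illustrative, not part of this PR.
interface RunConfig {
  run_type: string;        // matches an entry in RUN_TYPES, e.g. "SPARK"
  run_config_name: string; // a config section name, e.g. "CPU"
  name: string;            // remaining fields from the config.ini section
  command: string;
  msg: string;
  url: string;
}

interface ConfigRunReply {
  err_msg?: string;              // set when config.ini failed to parse
  run_types: string;             // run types shown in the toolbar dropdown
  run_types_config: RunConfig[]; // one entry per run configuration
}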
@@ -1348,16 +1349,32 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
alert("Testing");
}

+const getRunTypeFromConfig = async () => {
+	const configuration = await getRunTypesFromConfig("RUN_TYPES");
+	const error_msg = configuration["err_msg"];
+	if (error_msg) {
+		showDialog({
+			title: 'Failed parsing data from config.ini',
+			body: (
+				<pre>{error_msg}</pre>
+			),
+			buttons: [Dialog.warnButton({ label: 'OK' })]
+		});
+	}
+	setRunTypesCfg(configuration["run_types"]);
+	setRunConfigs(configuration["run_types_config"]);
+}

const hideRcDialog = () => {
setDisplayRcDialog(false);
}

useEffect(() => {
-// Only enable added arguments when in 'Spark Submit' mode
-if (runType == 'spark-submit') {
-	setSparkSubmitkNodes("Added Arguments")
+// Fetch the run configurations only when in 'Remote Run' mode
+if (runType == 'remote-run') {
+	getRunTypeFromConfig();
} else {
-	setSparkSubmitkNodes("")
+	setRunConfigs("")
}

context.ready.then(() => {
@@ -1398,8 +1415,9 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
title,
body: formDialogWidget(
<RunDialog
-lastAddedArgsSparkSubmit={addedArgSparkSubmit}
-childSparkSubmitNodes={sparkSubmitNodes}
+runTypes={runTypesCfg}
+runConfigs={runConfigs}
+lastConfig={lastConfig}
childStringNodes={stringNodes}
childBoolNodes={boolNodes}
childIntNodes={intNodes}
@@ -1418,9 +1436,18 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
}

let commandStr = ' ';
-// Added arguments for spark submit
-let addArgs = dialogResult["value"][sparkSubmitNodes] ?? "";
-setAddedArgSparkSubmit(addArgs);
+// Remember the last chosen config and pass it along as the output
+let config;
+let runType = dialogResult["value"]['runType'] ?? "";
+let runConfig = dialogResult["value"]['runConfig'] ?? "";
+if (runConfigs.length != 0) {
+	runConfigs.map(cfg => {
+		if (cfg.run_type == runType && cfg.run_config_name == runConfig) {
+			config = cfg;
+			setLastConfigs(cfg);
+		}
+	})
+}

stringNodes.forEach((param) => {
if (param == 'experiment name') {
Expand Down Expand Up @@ -1473,8 +1500,7 @@ export const BodyWidget: FC<BodyWidgetProps> = ({
}
});
}

-return { commandStr, addArgs };
+return { commandStr, config };
};
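As a concrete illustration, choosing SPARK with the CPU configuration in the dialog would resolve config to the [CPU] entry parsed from config.ini, roughly as below (the command value is abridged; the object shape is assumed from the fields read above and the config file):

// Hypothetical resolved value for runType "SPARK", runConfig "CPU"
const config = {
  run_type: "SPARK",
  run_config_name: "CPU",
  name: "CPU",
  command: "$SPARK_HOME/bin/spark-submit --py-files venv_pyspark_cyclone.zip ...",
  msg: "Running Spark Submit using CPU",
  url: "http://localhost:8088/",
};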

