slurmd[w3]: *** JOB 8 CANCELLED AT 2018-10-03T07:37:46 ***
slurmd[w3]: Unable to unlink domain socket: No such file or directory
slurmd[w3]: unlink(/tmp/slurm/slurmd_spool/job00008/slurm_script): No such file or directory
slurmd[w3]: rmdir(/tmp/slurm/slurmd_spool/job00008): No such file or directory
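The unlink/rmdir failures above typically just mean the job's spool directory under /tmp/slurm/slurmd_spool was already gone (for example, removed by a tmp cleaner) by the time slurmd cleaned up after the cancelled job. A quick check of the spool path slurmd is actually configured with, run on the affected worker:

scontrol show config | grep -i SlurmdSpoolDir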
The server could not complete the request. Please contact the Galaxy Team if this error persists. Error executing tool: 'Dataset collection has no element_identifier with key forward.'
{
  "tool_id": "toolshed.g2.bx.psu.edu/repos/devteam/bowtie2/bowtie2/2.2.6.2",
  "tool_version": "2.2.6.2",
  "inputs": {
    "library|type": "paired_collection",
    "library|input_1": {
      "values": [
        {
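The "no element_identifier with key forward" error above means the collection passed to library|input_1 is not a proper paired collection: bowtie2 looks up elements named forward and reverse inside it. As a sketch, a paired collection created through the Galaxy API carries those identifiers explicitly (the dataset ids here are placeholders):

{
  "collection_type": "paired",
  "name": "sample_pair",
  "element_identifiers": [
    {"name": "forward", "src": "hda", "id": "<forward dataset id>"},
    {"name": "reverse", "src": "hda", "id": "<reverse dataset id>"}
  ]
}

Pairs built in the UI with the "Build List of Dataset Pairs" operation get these identifiers automatically. Note that the workflow further down declares its input collection step with an empty collection_type, so it will accept a non-paired collection and reproduce this error.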
Micropathology / job_conf.xml
Created September 14, 2018 13:17
<?xml version="1.0"?>
<job_conf>
    <plugins workers="4">
        <plugin load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner" id="slurm"/>
        <plugin load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner" id="pulsar_rest"/>
Micropathology / galaxy.ini
Created September 14, 2018 13:14
[server:main]
use = egg:Paste#http
use_threadpool = True
host = 127.0.0.1
port = 8080
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
[filter:gzip]
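The proxy-prefix and gzip filters above only take effect if the app section opts into them. A minimal sketch of the wiring, assuming Galaxy is served under a /galaxy prefix (the prefix value and filter chain are assumptions, not taken from this file):

[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /galaxy
# assumed prefix; match the location block in the front-end proxy

[app:main]
paste.app_factory = galaxy.web.buildapp:app_factory
filter-with = proxy-prefix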
Micropathology / Copy of DEV_Hydra_typeref_pipeline270718_V2 shared by user danielhand.ga
Created September 14, 2018 13:11
{"uuid": "de323de5-7412-428c-9760-b9c25aaec178", "tags": [], "format-version": "0.1", "name": "Copy of DEV_Hydra_typeref_pipeline270718_V2 shared by user danielhand", "steps": {"0": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "cc7b4f4a-1b61-4fce-8dd3-be755f48a1fe", "label": null}], "input_connections": {}, "tool_state": "{\"collection_type\": \"\"}", "id": 0, "uuid": "53c2a8a1-72af-4a89-8b59-1eada81931fb", "errors": null, "name": "Input dataset collection", "label": null, "inputs": [], "position": {"top": 1479.5625, "left": 301}, "annotation": "", "content_id": null, "type": "data_collection_input"}, "1": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "98e3e462-104d-463c-ad3e-93e61ea4453e", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 1, "uuid": "1c8d473d-c6d7-4a4e-a1b9-33558c71c534", "errors": null, "name": "Input dataset", "label": "Type_reference_fasta", "i
This file has been truncated.
The entire log file (/mnt/galaxy/galaxy-app/main.log) is shown.
galaxy.queue_worker INFO 2018-09-13 10:26:49,630 Initializing main Galaxy Queue Worker on sqlalchemy+postgres://galaxy@localhost:5930/galaxy
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loading references to tool sheds from /mnt/galaxy/galaxy-app/config/tool_sheds_conf.xml
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loaded reference to tool shed: Galaxy Main Tool Shed
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loaded reference to tool shed: Galaxy Test Tool Shed
#
# Live version of this file will get overwritten by CloudMan.
#
# See the slurm.conf man page for more information.
#
ClusterName=GalaxyCloudMan
ControlMachine=ip-172-31-77-176
SlurmUser=slurm
SlurmctldPort=6817
SlurmdPort=6818
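CloudMan regenerates this file, but a working slurm.conf also needs node and partition definitions after the daemon settings above. A minimal sketch (the node names are hypothetical, though the job log earlier shows a worker named w3):

NodeName=w[1-4] CPUs=4 State=UNKNOWN
PartitionName=main Nodes=w[1-4] Default=YES MaxTime=INFINITE State=UP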
<?xml version="1.0" ?>
<job_conf>
    <plugins workers="4">
        <plugin id="slurm" load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner"/>
        <plugin id="pulsar_rest" load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner"/>
        <plugin id="local" load="galaxy.jobs.runners.local:LocalJobRunner" type="runner" workers="10"/>
    </plugins>
    <handlers default="handlers">
        <handler id="handler0" tags="handlers"><plugin id="slurm"/></handler>
        <handler id="handler1" tags="handlers"><plugin id="slurm"/></handler>
        <handler id="handler2" tags="handlers"><plugin id="slurm"/></handler>
    </handlers>
    <destinations default="slurm_cluster">

<?xml version="1.0" ?>
<job_conf>
    <plugins workers="4">
        <plugin id="slurm" load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner"/>
        <plugin id="pulsar_rest" load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner"/>
        <plugin id="local" load="galaxy.jobs.runners.local:LocalJobRunner" type="runner" workers="50"/>
    </plugins>
    <handlers default="handlers">
        <handler id="handler0" tags="handlers"><plugin id="slurm"/></handler>
        <handler id="handler1" tags="handlers"><plugin id="slurm"/></handler>
        <handler id="handler2" tags="handlers"><plugin id="slurm"/></handler>
    </handlers>
    <destinations default="slurm_cluster">
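Both copies of job_conf.xml above break off inside the destinations block. For reference, a destination wired to the slurm runner generally takes this shape (a sketch; the nativeSpecification value is an assumption, not recovered from the truncated files):

<destinations default="slurm_cluster">
    <destination id="slurm_cluster" runner="slurm">
        <!-- assumed submit options; tune to the cluster -->
        <param id="nativeSpecification">--ntasks=1</param>
    </destination>
</destinations>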
</head>
<body scroll="no" class="full-content">
    <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;">
        <div id="background"></div>
        <div id="masthead" class="navbar navbar-fixed-top navbar-inverse"></div>
        <div id="messagebox" style="display: none;"></div>
        <div id="inactivebox" class="panel-warning-message" style="display: none;"></div>