Micropathology

View gist:45aded5fd6e68355df39fee73d0b56b3
slurmd[w3]: *** JOB 8 CANCELLED AT 2018-10-03T07:37:46 ***
slurmd[w3]: Unable to unlink domain socket: No such file or directory
slurmd[w3]: unlink(/tmp/slurm/slurmd_spool/job00008/slurm_script): No such file or directory
slurmd[w3]: rmdir(/tmp/slurm/slurmd_spool/job00008): No such file or directory
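The unlink/rmdir failures above are usually cleanup noise rather than the cause of the cancellation: after a job is cancelled, slurmd tries to remove the job's spool files, which may already be gone. A minimal sketch, assuming the spool path taken from the log above, that lists whatever remains under the slurmd spool directory on a worker node:

import os

# Path from the log above; adjust to SlurmdSpoolDir in your slurm.conf.
spool_dir = "/tmp/slurm/slurmd_spool"

if os.path.isdir(spool_dir):
    for name in sorted(os.listdir(spool_dir)):
        print(os.path.join(spool_dir, name))
else:
    print(spool_dir + " does not exist on this node")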
View gist:cbb6a77c8e60eceaeb1814a9ed218385
The server could not complete the request. Please contact the Galaxy Team if this error persists. Error executing tool: 'Dataset collection has no element_identifier with key forward.'
{
  "tool_id": "toolshed.g2.bx.psu.edu/repos/devteam/bowtie2/bowtie2/2.2.6.2",
  "tool_version": "2.2.6.2",
  "inputs": {
    "library|type": "paired_collection",
    "library|input_1": {
      "values": [
        {
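The "no element_identifier with key forward" error indicates the input was not a true paired collection: bowtie2's paired_collection mode expects a collection whose elements are named forward and reverse. A minimal sketch, assuming the element_identifiers payload shape used by the Galaxy dataset-collection API, of what a paired collection description looks like (the dataset IDs are hypothetical placeholders):

import json

# Hypothetical encoded HDA ids; a real request would use ids from your history.
paired_collection = {
    "name": "sample_1",
    "collection_type": "paired",
    "element_identifiers": [
        {"name": "forward", "src": "hda", "id": "HDA_ID_R1"},
        {"name": "reverse", "src": "hda", "id": "HDA_ID_R2"},
    ],
}
print(json.dumps(paired_collection, indent=2))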
View job_conf.xml
<?xml version="1.0"?>
<job_conf>
    <plugins workers="4">
        <plugin load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner" id="slurm"/>
        <plugin load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner" id="pulsar_rest"/>
View galaxy.ini
[server:main]
use = egg:Paste#http
use_threadpool = True
host = 127.0.0.1
port = 8080
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
[filter:gzip]
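A minimal sketch, assuming the file sits in the current directory, that reads the [server:main] settings above with the standard library (interpolation is disabled because galaxy.ini may contain % placeholders elsewhere):

from configparser import ConfigParser

cfg = ConfigParser(interpolation=None)
cfg.read("galaxy.ini")
print(cfg.get("server:main", "host"), cfg.get("server:main", "port"))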
View Copy of DEV_Hydra_typeref_pipeline270718_V2 shared by user danielhand.ga
Created Sep 14, 2018
{"uuid": "de323de5-7412-428c-9760-b9c25aaec178", "tags": [], "format-version": "0.1", "name": "Copy of DEV_Hydra_typeref_pipeline270718_V2 shared by user danielhand", "steps": {"0": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "cc7b4f4a-1b61-4fce-8dd3-be755f48a1fe", "label": null}], "input_connections": {}, "tool_state": "{\"collection_type\": \"\"}", "id": 0, "uuid": "53c2a8a1-72af-4a89-8b59-1eada81931fb", "errors": null, "name": "Input dataset collection", "label": null, "inputs": [], "position": {"top": 1479.5625, "left": 301}, "annotation": "", "content_id": null, "type": "data_collection_input"}, "1": {"tool_id": null, "tool_version": null, "outputs": [], "workflow_outputs": [{"output_name": "output", "uuid": "98e3e462-104d-463c-ad3e-93e61ea4453e", "label": null}], "input_connections": {}, "tool_state": "{}", "id": 1, "uuid": "1c8d473d-c6d7-4a4e-a1b9-33558c71c534", "errors": null, "name": "Input dataset", "label": "Type_reference_fasta", "i
View gist:5f441bf44c9427f4be934abc77b72a6d
This file has been truncated.
CloudMan from Galaxy
The entire log file (/mnt/galaxy/galaxy-app/main.log) is shown.
galaxy.queue_worker INFO 2018-09-13 10:26:49,630 Initializing main Galaxy Queue Worker on sqlalchemy+postgres://galaxy@localhost:5930/galaxy
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loading references to tool sheds from /mnt/galaxy/galaxy-app/config/tool_sheds_conf.xml
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loaded reference to tool shed: Galaxy Main Tool Shed
tool_shed.tool_shed_registry DEBUG 2018-09-13 10:26:49,652 Loaded reference to tool shed: Galaxy Test Tool Shed
View gist:5feca43adc4fc0f931a53164673a66b9
#
# Live version of this file will get overwritten by CloudMan.
#
# See the slurm.conf man page for more information.
#
ClusterName=GalaxyCloudMan
ControlMachine=ip-172-31-77-176
SlurmUser=slurm
SlurmctldPort=6817
SlurmdPort=6818
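A minimal sketch, using the ControlMachine and ports from the slurm.conf above, that checks whether this node can reach slurmctld and a slurmd; connectivity failures here are a common cause of cancelled or lost jobs. The worker hostname w3 is taken from the slurmd log earlier and is an assumption about your node names:

import socket

checks = [("ip-172-31-77-176", 6817),  # ControlMachine, SlurmctldPort
          ("w3", 6818)]                # example worker node, SlurmdPort

for host, port in checks:
    try:
        with socket.create_connection((host, port), timeout=2):
            print(host + ":" + str(port) + " reachable")
    except OSError as exc:
        print(host + ":" + str(port) + " unreachable: " + str(exc))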
View gist:0719fd1a54de69e727503fcd83be4279
<?xml version="1.0" ?><job_conf>
<plugins workers="4">
<plugin id="slurm" load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner"/>
<plugin id="pulsar_rest" load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner"/>
<plugin id="local" load="galaxy.jobs.runners.local:LocalJobRunner" type="runner" workers="10"/>
</plugins>
<handlers default="handlers"><handler id="handler0" tags="handlers"><plugin id="slurm"/></handler><handler id="handler1" tags="handlers"><plugin id="slurm"/></handler><handler id="handler2" tags="handlers"><plugin id="slurm"/></handler></handlers>
<destinations default="slurm_cluster">
View gist:8d26aa5d4f61441441755f4a634fa3cb
<?xml version="1.0" ?><job_conf>
<plugins workers="4">
<plugin id="slurm" load="galaxy.jobs.runners.slurm:SlurmJobRunner" type="runner"/>
<plugin id="pulsar_rest" load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner" type="runner"/>
<plugin id="local" load="galaxy.jobs.runners.local:LocalJobRunner" type="runner" workers="50"/>
</plugins>
<handlers default="handlers"><handler id="handler0" tags="handlers"><plugin id="slurm"/></handler><handler id="handler1" tags="handlers"><plugin id="slurm"/></handler><handler id="handler2" tags="handlers"><plugin id="slurm"/></handler></handlers>
<destinations default="slurm_cluster">
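The two job_conf.xml revisions above differ only in the local runner's worker count (10 vs. 50). A minimal sketch, with the file path as an assumption, that parses the file with the standard library and prints each handler's bound plugin, which is a quick way to confirm the handlers section survived an edit:

import xml.etree.ElementTree as ET

root = ET.parse("job_conf.xml").getroot()

# Map each runner plugin id to the class it loads.
plugins = {p.get("id"): p.get("load") for p in root.find("plugins")}
print("runner plugins:", plugins)

# Show which plugin each job handler is bound to.
for handler in root.find("handlers"):
    bound = [pl.get("id") for pl in handler.findall("plugin")]
    print(handler.get("id"), "->", ", ".join(bound))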
View gist:dee38479dc4b8f43e010df187b22ebe6
</head>
<body scroll="no" class="full-content">
<div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;">
<div id="background"></div>
<div id="masthead" class="navbar navbar-fixed-top navbar-inverse"></div>
<div id="messagebox" style="display: none;"></div>
<div id="inactivebox" class="panel-warning-message" style="display: none;"></div>