I wrote a shell script to automatically install Python on my Linux machine, like this:
[ ! -d /usr/local/src/python ] && mkdir /usr/local/src/python
[ ! -d /usr/local/python ] && mkdir /usr/local/python
tar -xvpf ~/Python-2.7.3.tgz -C /usr/local/src/
cd /usr/local/src/python/Python-2.7.3 && ./configure --prefix=/usr/local/python && make && make install
But it failed with an error like this:
make: *** no rule to make target 'install '. stop.
I find that make succeeds while make install fails. Does anybody know how to fix it? Thanks!
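For reference, here is a minimal sketch of the same sequence with consistent paths. Two details may be worth checking against the script above: the tarball is extracted into /usr/local/src/, so the source tree typically lands in /usr/local/src/Python-2.7.3 rather than /usr/local/src/python/Python-2.7.3, and the quoted error names the target as 'install ' with a trailing space, so it may be worth making sure the script passes exactly "install" with no trailing whitespace. The directory names below are assumptions:
#!/bin/sh
set -e
mkdir -p /usr/local/src /usr/local/python
tar -xpf ~/Python-2.7.3.tgz -C /usr/local/src/
cd /usr/local/src/Python-2.7.3
./configure --prefix=/usr/local/python
make
make install   # or "make altinstall" to avoid touching an existing Python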
Below is the result of
cat Makefile | grep -B5 -A15 install
# time you run the configure script. Ideally, you can do:
#
# ./configure
# make
# make test
# make install
#
# If you have a previous version of Python installed that you don't
# want to overwrite, you can use "make altinstall" instead of "make
# install". Refer to the "Installing" section in the README file for
# additional details.
#
# See also the section "Build instructions" in the README file.
# === Variables set by makesetup ===
MODOBJS= Modules/threadmodule.o Modules/signalmodule.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/zipimport.o Modules/symtablemodule.o Modules/xxsubtype.o
MODLIBS= $(LOCALMODLIBS) $(BASEMODLIBS)
# === Variables set by configure
VERSION= 2.7
srcdir= .
CC= gcc -pthread
--
SHELL= /bin/sh
# Use this to make a link between python$(VERSION) and python in $(BINDIR)
LN= ln
# Portable install script (configure doesn't always guess right)
INSTALL= /usr/bin/install -c
INSTALL_PROGRAM=${INSTALL}
INSTALL_SCRIPT= ${INSTALL}
INSTALL_DATA= ${INSTALL} -m 644
# Shared libraries must be installed with executable mode on some systems;
# rather than figuring out exactly which, we always give them executable mode.
# Also, making them read-only seems to be a good idea...
INSTALL_SHARED= ${INSTALL} -m 555
MAKESETUP= $(srcdir)/Modules/makesetup
# Compiler options
OPT= -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes
BASECFLAGS= -fno-strict-aliasing
CFLAGS= $(BASECFLAGS) -g -O2 $(OPT) $(EXTRA_CFLAGS)
# Both CPPFLAGS and LDFLAGS need to contain the shell's value for setup.py to
# be able to build extension modules using the directories specified in the
# environment variables
CPPFLAGS= -I. -IInclude -I$(srcdir)/Include
LDFLAGS=
--
#export MACOSX_DEPLOYMENT_TARGET
# Options to enable prebinding (for fast startup prior to Mac OS X 10.3)
OTHER_LIBTOOL_OPT=
# Environment to run shared python without installed libraries
RUNSHARED=
# Modes for directories, executables and data files created by the
# install process. Default to user-only-writable for all file types.
DIRMODE= 755
EXEMODE= 755
FILEMODE= 644
# configure script arguments
CONFIG_ARGS= '--prefix=/usr/local/pythonbvs'
# Subdirectories with code
SRCDIRS= Parser Grammar Objects Python Modules Mac
# Other subdirectories
SUBDIRSTOO= Include Lib Misc Demo
# Files and directories to be distributed
--
else \
$(BLDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
fi
libpython$(VERSION).dylib: $(LIBRARY_OBJS)
$(CC) -dynamiclib -Wl,-single_module $(LDFLAGS) -undefined dynamic_lookup -Wl,-install_name,$(prefix)/lib/libpython$(VERSION).dylib -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
libpython$(VERSION).sl: $(LIBRARY_OBJS)
$(LDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST)
# Copy up the gdb python hooks into a position where they can be automatically
# loaded by gdb during Lib/test/test_gdb.py
#
# Distributors are likely to want to install this somewhere else e.g. relative
# to the stripped DWARF data for the shared library.
gdbhooks: $(BUILDPYTHON)-gdb.py
SRC_GDB_HOOKS=$(srcdir)/Tools/gdb/libpython.py
$(BUILDPYTHON)-gdb.py: $(SRC_GDB_HOOKS)
$(INSTALL_DATA) $(SRC_GDB_HOOKS) $(BUILDPYTHON)-gdb.py
# This rule is here for OPENSTEP/Rhapsody/MacOSX. It builds a temporary
# minimal framework (not including the Lib directory and such) in the current
# directory.
RESSRCDIR=Mac/Resources/framework
$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK): \
$(LIBRARY) \
$(RESSRCDIR)/Info.plist
$(INSTALL) -d -m $(DIRMODE) $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)
$(CC) -o $(LDLIBRARY) $(LDFLAGS) -dynamiclib \
-all_load $(LIBRARY) -Wl,-single_module \
-install_name $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK) \
-compatibility_version $(VERSION) \
-current_version $(VERSION);
$(INSTALL) -d -m $(DIRMODE) \
$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/English.lproj
$(INSTALL_DATA) $(RESSRCDIR)/Info.plist \
$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/Info.plist
$(LN) -fsn $(VERSION) $(PYTHONFRAMEWORKDIR)/Versions/Current
$(LN) -fsn Versions/Current/$(PYTHONFRAMEWORK) $(PYTHONFRAMEWORKDIR)/$(PYTHONFRAMEWORK)
$(LN) -fsn Versions/Current/Headers $(PYTHONFRAMEWORKDIR)/Headers
$(LN) -fsn Versions/Current/Resources $(PYTHONFRAMEWORKDIR)/Resources
# This rule builds the Cygwin Python DLL and import library if configured
# for a shared core library; otherwise, this rule is a noop.
$(DLLLIBRARY) libpython$(VERSION).dll.a: $(LIBRARY_OBJS)
if test -n "$(DLLLIBRARY)"; then \
--
-rm -f $(srcdir)/Lib/test/*.py[co]
-$(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
$(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
# Install everything
install: altinstall bininstall maninstall
# Install almost everything without disturbing previous versions
altinstall: altbininstall libinstall inclinstall libainstall \
sharedinstall oldsharedinstall
# Install shared libraries enabled by Setup
DESTDIRS= $(exec_prefix) $(LIBDIR) $(BINLIBDEST) $(DESTSHARED)
oldsharedinstall: $(DESTSHARED) $(SHAREDMODS)
@for i in X $(SHAREDMODS); do \
if test $$i != X; then \
echo $(INSTALL_SHARED) $$i $(DESTSHARED)/`basename $$i`; \
$(INSTALL_SHARED) $$i $(DESTDIR)$(DESTSHARED)/`basename $$i`; \
fi; \
done
$(DESTSHARED):
@for i in $(DESTDIRS); \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
# Install the interpreter by creating a symlink chain:
# $(PYTHON) -> python2 -> python$(VERSION))
# Also create equivalent chains for other installed files
bininstall: altbininstall
-if test -f $(DESTDIR)$(BINDIR)/$(PYTHON) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON); \
then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \
else true; \
fi
(cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(EXE) $(PYTHON))
-rm -f $(DESTDIR)$(BINDIR)/python2$(EXE)
(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python2$(EXE))
-rm -f $(DESTDIR)$(BINDIR)/python2-config
(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python2-config)
-rm -f $(DESTDIR)$(BINDIR)/python-config
(cd $(DESTDIR)$(BINDIR); $(LN) -s python2-config python-config)
-test -d $(DESTDIR)$(LIBPC) || $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC)
-rm -f $(DESTDIR)$(LIBPC)/python2.pc
(cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python2.pc)
-rm -f $(DESTDIR)$(LIBPC)/python.pc
(cd $(DESTDIR)$(LIBPC); $(LN) -s python2.pc python.pc)
# Install the interpreter with $(VERSION) affixed
# This goes into $(exec_prefix)
altbininstall: $(BUILDPYTHON)
@for i in $(BINDIR) $(LIBDIR); \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
$(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE)
if test -f $(LDLIBRARY); then \
if test -n "$(DLLLIBRARY)" ; then \
$(INSTALL_SHARED) $(DLLLIBRARY) $(DESTDIR)$(BINDIR); \
else \
$(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(LIBDIR)/$(INSTSONAME); \
if test $(LDLIBRARY) != $(INSTSONAME); then \
--
fi; \
else true; \
fi
# Install the manual page
maninstall:
@for i in $(MANDIR) $(MANDIR)/man1; \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
$(INSTALL_DATA) $(srcdir)/Misc/python.man \
$(DESTDIR)$(MANDIR)/man1/python$(VERSION).1
# Install the library
PLATDIR= plat-$(MACHDEP)
EXTRAPLATDIR=
EXTRAMACHDEPPATH=
--
distutils distutils/command distutils/tests $(XMLLIBSUBDIRS) \
multiprocessing multiprocessing/dummy \
unittest unittest/test \
lib-old \
curses pydoc_data $(MACHDEPS)
libinstall: build_all $(srcdir)/Lib/$(PLATDIR) $(srcdir)/Modules/xxmodule.c
@for i in $(SCRIPTDIR) $(LIBDEST); \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
@for d in $(LIBSUBDIRS); \
do \
a=$(srcdir)/Lib/$$d; \
if test ! -d $$a; then continue; else true; fi; \
b=$(LIBDEST)/$$d; \
if test ! -d $(DESTDIR)$$b; then \
echo "Creating directory $$b"; \
--
# is not available in configure
sed -e "s,#EXENAME#,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config
# Install the include files
INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY)
inclinstall:
@for i in $(INCLDIRSTOMAKE); \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
@for i in $(srcdir)/Include/*.h; \
do \
echo $(INSTALL_DATA) $$i $(INCLUDEPY); \
$(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY); \
done
$(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h
--
LIBPL= $(LIBP)/config
# pkgconfig directory
LIBPC= $(LIBDIR)/pkgconfig
libainstall: all python-config
@for i in $(LIBDIR) $(LIBP) $(LIBPL) $(LIBPC); \
do \
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
@if test -d $(LIBRARY); then :; else \
if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
if test "$(SO)" = .dll; then \
$(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
else \
$(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
$(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
fi; \
else \
echo Skip install of $(LIBRARY) - use make frameworkinstall; \
fi; \
fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
$(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile
$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
$(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
$(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc
$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
$(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config
rm python-config
@if [ -s Modules/python.exp -a \
"`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
echo; echo "Installing support files for building shared extension modules on AIX:"; \
$(INSTALL_DATA) Modules/python.exp \
$(DESTDIR)$(LIBPL)/python.exp; \
echo; echo "$(LIBPL)/python.exp"; \
$(INSTALL_SCRIPT) $(srcdir)/Modules/makexp_aix \
$(DESTDIR)$(LIBPL)/makexp_aix; \
echo "$(LIBPL)/makexp_aix"; \
$(INSTALL_SCRIPT) $(srcdir)/Modules/ld_so_aix \
$(DESTDIR)$(LIBPL)/ld_so_aix; \
echo "$(LIBPL)/ld_so_aix"; \
echo; echo "See Misc/AIX-NOTES for details."; \
--
;; \
esac
# Install the dynamically loadable modules
# This goes into $(exec_prefix)
sharedinstall: sharedmods
$(RUNSHARED) ./$(BUILDPYTHON) -E $(srcdir)/setup.py install \
--prefix=$(prefix) \
--install-scripts=$(BINDIR) \
--install-platlib=$(DESTSHARED) \
--root=$(DESTDIR)/
# Here are a couple of targets for MacOSX again, to install a full
# framework-based Python. frameworkinstall installs everything, the
# subtargets install specific parts. Much of the actual work is offloaded to
# the Makefile in Mac
#
#
# This target is here for backward compatiblity, previous versions of Python
# hadn't integrated framework installation in the normal install process.
frameworkinstall: install
# On install, we re-make the framework
# structure in the install location, /Library/Frameworks/ or the argument to
# --enable-framework. If --enable-framework has been specified then we have
# automatically set prefix to the location deep down in the framework, so we
# only have to cater for the structural bits of the framework.
frameworkinstallframework: frameworkinstallstructure install frameworkinstallmaclib
frameworkinstallstructure: $(LDLIBRARY)
@if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
echo Not configured with --enable-framework; \
exit 1; \
else true; \
fi
@for i in $(prefix)/Resources/English.lproj $(prefix)/lib; do\
if test ! -d $(DESTDIR)$$i; then \
echo "Creating directory $(DESTDIR)$$i"; \
$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
else true; \
fi; \
done
$(LN) -fsn include/python$(VERSION) $(DESTDIR)$(prefix)/Headers
sed 's/%VERSION%/'"`$(RUNSHARED) ./$(BUILDPYTHON) -c 'import platform; print platform.python_version()'`"'/g' < $(RESSRCDIR)/Info.plist > $(DESTDIR)$(prefix)/Resources/Info.plist
$(LN) -fsn $(VERSION) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/Current
$(LN) -fsn Versions/Current/$(PYTHONFRAMEWORK) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/$(PYTHONFRAMEWORK)
$(LN) -fsn Versions/Current/Headers $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers
$(LN) -fsn Versions/Current/Resources $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Resources
$(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY)
# This installs Mac/Lib into the framework
# Install a number of symlinks to keep software that expects a normal unix
# install (which includes python-config) happy.
frameworkinstallmaclib:
ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config/libpython$(VERSION).a"
ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config/libpython$(VERSION).dylib"
ln -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(VERSION).dylib"
cd Mac && $(MAKE) installmacsubtree DESTDIR="$(DESTDIR)"
# This installs the IDE, the Launcher and other apps into /Applications
frameworkinstallapps:
cd Mac && $(MAKE) installapps DESTDIR="$(DESTDIR)"
# This install the unix python and pythonw tools in /usr/local/bin
frameworkinstallunixtools:
cd Mac && $(MAKE) installunixtools DESTDIR="$(DESTDIR)"
frameworkaltinstallunixtools:
cd Mac && $(MAKE) altinstallunixtools DESTDIR="$(DESTDIR)"
# This installs the Demos and Tools into the applications directory.
# It is not part of a normal frameworkinstall
frameworkinstallextras:
cd Mac && $(MAKE) installextras DESTDIR="$(DESTDIR)"
# This installs a few of the useful scripts in Tools/scripts
scriptsinstall:
SRCDIR=$(srcdir) $(RUNSHARED) \
./$(BUILDPYTHON) $(srcdir)/Tools/scripts/setup.py install \
--prefix=$(prefix) \
--install-scripts=$(BINDIR) \
--root=$(DESTDIR)/
# Build the toplevel Makefile
Makefile.pre: Makefile.pre.in config.status
CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
$(MAKE) -f Makefile.pre Makefile
# Run the configure script.
config.status: $(srcdir)/configure
$(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
# Some make's put the object file in the current directory
.c.o:
--
Python/thread.o: $(srcdir)/Python/thread_atheos.h $(srcdir)/Python/thread_beos.h $(srcdir)/Python/thread_cthread.h $(srcdir)/Python/thread_foobar.h $(srcdir)/Python/thread_lwp.h $(srcdir)/Python/thread_nt.h $(srcdir)/Python/thread_os2.h $(srcdir)/Python/thread_pth.h $(srcdir)/Python/thread_pthread.h $(srcdir)/Python/thread_sgi.h $(srcdir)/Python/thread_solaris.h $(srcdir)/Python/thread_wince.h
# Declare targets that aren't real files
.PHONY: all build_all sharedmods oldsharedmods test quicktest memtest
.PHONY: install altinstall oldsharedinstall bininstall altbininstall
.PHONY: maninstall libinstall inclinstall libainstall sharedinstall
.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
.PHONY: frameworkaltinstallunixtools recheck autoconf clean clobber distclean
.PHONY: smelly funny patchcheck
.PHONY: gdbhooks
# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
# Rules appended by makedepend
Modules/threadmodule.o: $(srcdir)/Modules/threadmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/threadmodule.c -o Modules/threadmodule.o
Modules/threadmodule$(SO): Modules/threadmodule.o; $(BLDSHARED) Modules/threadmodule.o -o Modules/threadmodule$(SO)
Modules/signalmodule.o: $(srcdir)/Modules/signalmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/signalmodule.c -o Modules/signalmodule.o
Modules/signalmodule$(SO): Modules/signalmodule.o; $(BLDSHARED) Modules/signalmodule.o -o Modules/signalmodule$(SO)
Modules/posixmodule.o: $(srcdir)/Modules/posixmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/posixmodule.c -o Modules/posixmodule.o
Modules/posixmodule$(SO): Modules/posixmodule.o; $(BLDSHARED) Modules/posixmodule.o -o Modules/posixmodule$(SO)
Modules/errnomodule.o: $(srcdir)/Modules/errnomodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/errnomodule.c -o Modules/errnomodule.o
Modules/errnomodule$(SO): Modules/errnomodule.o; $(BLDSHARED) Modules/errnomodule.o -o Modules/errnomodule$(SO)
Related
I am trying to run this Docker image, but I am not sure why I am getting this error:
/usr/bin/time: cannot run /usr/bin/java: No such file or directory
Command exited with non-zero status 127
Can someone please help me debug this error?
My Dockerfile:
FROM openjdk:8-jre
LABEL maintainer="APN <xxx@xxx.edu>"
LABEL org.label-schema.schema-version="1.0"
# LABEL org.label-schema.build-date=$BUILD_DATE
LABEL org.label-schema.name="apn/addreadgroups"
LABEL org.label-schema.description="Image for adding read groups in .bam"
ENV PICARD_VERSION 2.20.8
WORKDIR /tmp
RUN apt-get update -y \
&& apt-get install --no-install-recommends -y \
make \
gcc \
g++ \
libz-dev \
libbz2-dev \
liblzma-dev \
ncurses-dev \
bc \
libnss-sss \
time \
&& cd /tmp \
&& wget -q -O /usr/bin/picard.jar https://github.com/broadinstitute/picard/releases/download/${PICARD_VERSION}/picard.jar \
&& ln -sf /usr/share/zoneinfo/America/Chicago /etc/localtime \
&& echo "America/Chicago" > /etc/timezone \
&& dpkg-reconfigure --frontend noninteractive tzdata \
&& apt-get clean all \
&& rm -rfv /var/lib/apt/lists/* /tmp/* /var/tmp/*
# This makes the image crazy large -- will find a workaround
# COPY human_g1k_v37_decoy* /usr/local/
COPY ./entrypoint.sh /usr/local/bin/
ENV PICARD /usr/bin/picard.jar
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
# CMD ["/bin/bash"]
and the entrypoint.sh:
JAVAOPTS="-Xms2g -Xmx${MEM}g -XX:+UseSerialGC -Dpicard.useLegacyParser=false"
CUR_STEP="AddOrReplaceReadGroups"
/usr/bin/java ${JAVAOPTS} -jar "${PICARD}" \
"${CUR_STEP}" \
I="${INBAM}" \
O=${BAMFILE} \
RGID=${FLOWCELL} \
RGLB=${LIBRARY} \
RGPL=${PLATFORM} \
RGPU=${FLOWCELL} \
RGSM=${SM}
Exit status 127 means the command was not found.
This is because the java command in openjdk:8-jre is not located at /usr/bin/java; see:
$ docker run -it openjdk:8-jre which java
/usr/local/openjdk-8/bin/java
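One hedged fix is to resolve java from the image's PATH instead of hardcoding /usr/bin/java. The sketch below keeps the variable names of the original entrypoint.sh (MEM, PICARD, INBAM, BAMFILE, FLOWCELL, LIBRARY, PLATFORM and SM are expected in the environment, as before) and only changes how java is located:
#!/bin/bash
# Resolve java from PATH; in openjdk:8-jre this is /usr/local/openjdk-8/bin/java.
JAVA_BIN="$(command -v java)"
JAVAOPTS="-Xms2g -Xmx${MEM}g -XX:+UseSerialGC -Dpicard.useLegacyParser=false"
CUR_STEP="AddOrReplaceReadGroups"
"${JAVA_BIN}" ${JAVAOPTS} -jar "${PICARD}" \
    "${CUR_STEP}" \
    I="${INBAM}" \
    O="${BAMFILE}" \
    RGID="${FLOWCELL}" \
    RGLB="${LIBRARY}" \
    RGPL="${PLATFORM}" \
    RGPU="${FLOWCELL}" \
    RGSM="${SM}"
Alternatively, simply calling java (relying on PATH) instead of /usr/bin/java in the original script should work as well.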
I have created/used a Dockerfile for SonarQube Community Edition based on the one provided on Docker Hub. I have also added volumes accordingly in the Dockerfile and set the ACI/App Service restart policy to "Never", but still, whenever the ACI/App Service restarts, there is no history for the already scanned projects (and signing in to the SonarQube Azure container-based serverless instance always asks to create a new project and a new token all over again).
Could anyone help me troubleshoot this issue? Below are the Dockerfile and a SonarQube screenshot for reference.
FROM alpine:3.11
ENV JAVA_VERSION="jdk-11.0.6+10" \
LANG='en_US.UTF-8' \
LANGUAGE='en_US:en' \
LC_ALL='en_US.UTF-8'
#
# glibc setup
#
RUN set -eux; \
apk add --no-cache --virtual .build-deps curl binutils; \
GLIBC_VER="2.31-r0"; \
ALPINE_GLIBC_REPO="https://github.com/sgerrand/alpine-pkg-glibc/releases/download"; \
GCC_LIBS_URL="https://archive.archlinux.org/packages/g/gcc-libs/gcc-libs-9.1.0-2-x86_64.pkg.tar.xz"; \
GCC_LIBS_SHA256="91dba90f3c20d32fcf7f1dbe91523653018aa0b8d2230b00f822f6722804cf08"; \
ZLIB_URL="https://archive.archlinux.org/packages/z/zlib/zlib-1%3A1.2.11-3-x86_64.pkg.tar.xz"; \
ZLIB_SHA256=17aede0b9f8baa789c5aa3f358fbf8c68a5f1228c5e6cba1a5dd34102ef4d4e5; \
curl -LfsS https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub -o /etc/apk/keys/sgerrand.rsa.pub; \
SGERRAND_RSA_SHA256="823b54589c93b02497f1ba4dc622eaef9c813e6b0f0ebbb2f771e32adf9f4ef2"; \
echo "${SGERRAND_RSA_SHA256} */etc/apk/keys/sgerrand.rsa.pub" | sha256sum -c -; \
curl -LfsS ${ALPINE_GLIBC_REPO}/${GLIBC_VER}/glibc-${GLIBC_VER}.apk > /tmp/glibc-${GLIBC_VER}.apk; \
apk add --no-cache /tmp/glibc-${GLIBC_VER}.apk; \
curl -LfsS ${ALPINE_GLIBC_REPO}/${GLIBC_VER}/glibc-bin-${GLIBC_VER}.apk > /tmp/glibc-bin-${GLIBC_VER}.apk; \
apk add --no-cache /tmp/glibc-bin-${GLIBC_VER}.apk; \
curl -LfsS ${ALPINE_GLIBC_REPO}/${GLIBC_VER}/glibc-i18n-${GLIBC_VER}.apk > /tmp/glibc-i18n-${GLIBC_VER}.apk; \
apk add --no-cache /tmp/glibc-i18n-${GLIBC_VER}.apk; \
/usr/glibc-compat/bin/localedef --force --inputfile POSIX --charmap UTF-8 "$LANG" || true; \
echo "export LANG=$LANG" > /etc/profile.d/locale.sh; \
curl -LfsS ${GCC_LIBS_URL} -o /tmp/gcc-libs.tar.xz; \
echo "${GCC_LIBS_SHA256} */tmp/gcc-libs.tar.xz" | sha256sum -c -; \
mkdir /tmp/gcc; \
tar -xf /tmp/gcc-libs.tar.xz -C /tmp/gcc; \
mv /tmp/gcc/usr/lib/libgcc* /tmp/gcc/usr/lib/libstdc++* /usr/glibc-compat/lib; \
strip /usr/glibc-compat/lib/libgcc_s.so.* /usr/glibc-compat/lib/libstdc++.so*; \
curl -LfsS ${ZLIB_URL} -o /tmp/libz.tar.xz; \
echo "${ZLIB_SHA256} */tmp/libz.tar.xz" | sha256sum -c -; \
mkdir /tmp/libz; \
tar -xf /tmp/libz.tar.xz -C /tmp/libz; \
mv /tmp/libz/usr/lib/libz.so* /usr/glibc-compat/lib; \
apk del --purge .build-deps glibc-i18n; \
rm -rf /tmp/*.apk /tmp/gcc /tmp/gcc-libs.tar.xz /tmp/libz /tmp/libz.tar.xz /var/cache/apk/*;
#
# AdoptOpenJDK/openjdk11 setup
#
RUN set -eux; \
apk add --no-cache --virtual .fetch-deps curl; \
ARCH="$(apk --print-arch)"; \
case "${ARCH}" in \
aarch64|arm64) \
ESUM='7ed04ed9ed7271528e7f03490f1fd7dfbbc2d391414bd6fe4dd80ec3bad76d30'; \
BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.6%2B10/OpenJDK11U-jre_aarch64_linux_hotspot_11.0.6_10.tar.gz'; \
;; \
ppc64el|ppc64le) \
ESUM='49231f2c36487b53141ade3f7eb291e2855138b14b1129f9acf435ea9cc0e899'; \
BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.6%2B10/OpenJDK11U-jre_ppc64le_linux_hotspot_11.0.6_10.tar.gz'; \
;; \
s390x) \
ESUM='bcb3f46cbad742b08c81e922e313549c029f436ac7d91ef3c9bed8e4049d67d2'; \
BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.6%2B10/OpenJDK11U-jre_s390x_linux_hotspot_11.0.6_10.tar.gz'; \
;; \
amd64|x86_64) \
ESUM='c5a4e69e2be0e3e5f5bb7c759960b20650967d0f571baad4a7f15b2c03bda352'; \
BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.6%2B10/OpenJDK11U-jre_x64_linux_hotspot_11.0.6_10.tar.gz'; \
;; \
*) \
echo "Unsupported arch: ${ARCH}"; \
exit 1; \
;; \
esac; \
curl -LfsSo /tmp/openjdk.tar.gz ${BINARY_URL}; \
echo "${ESUM} */tmp/openjdk.tar.gz" | sha256sum -c -; \
mkdir -p /opt/java/openjdk; \
cd /opt/java/openjdk; \
tar -xf /tmp/openjdk.tar.gz --strip-components=1; \
apk del --purge .fetch-deps; \
rm -rf /var/cache/apk/*; \
rm -rf /tmp/openjdk.tar.gz;
#
# SonarQube setup
#
ARG SONARQUBE_VERSION=8.4.0.35506
ARG SONARQUBE_ZIP_URL=https://binaries.sonarsource.com/Distribution/sonarqube/sonarqube-${SONARQUBE_VERSION}.zip
ENV JAVA_HOME=/opt/java/openjdk \
PATH="/opt/java/openjdk/bin:$PATH" \
SONARQUBE_HOME=/opt/sonarqube \
SONAR_VERSION="${SONARQUBE_VERSION}" \
SQ_DATA_DIR="/opt/sonarqube/data" \
SQ_EXTENSIONS_DIR="/opt/sonarqube/extensions" \
SQ_LOGS_DIR="/opt/sonarqube/logs" \
SQ_TEMP_DIR="/opt/sonarqube/temp"
RUN set -ex \
&& addgroup -S -g 1000 sonarqube \
&& adduser -S -D -u 1000 -G sonarqube sonarqube \
&& apk add --no-cache --virtual build-dependencies gnupg unzip curl \
&& apk add --no-cache bash su-exec ttf-dejavu \
# pub 2048R/D26468DE 2015-05-25
# Key fingerprint = F118 2E81 C792 9289 21DB CAB4 CFCA 4A29 D264 68DE
# uid sonarsource_deployer (Sonarsource Deployer) <infra@sonarsource.com>
# sub 2048R/06855C1D 2015-05-25
&& sed --in-place --expression="s?securerandom.source=file:/dev/random?securerandom.source=file:/dev/urandom?g" "${JAVA_HOME}/conf/security/java.security" \
&& for server in $(shuf -e ha.pool.sks-keyservers.net \
hkp://p80.pool.sks-keyservers.net:80 \
keyserver.ubuntu.com \
hkp://keyserver.ubuntu.com:80 \
pgp.mit.edu) ; do \
gpg --batch --keyserver "${server}" --recv-keys F1182E81C792928921DBCAB4CFCA4A29D26468DE && break || : ; \
done \
&& mkdir --parents /opt \
&& cd /opt \
&& curl --fail --location --output sonarqube.zip --silent --show-error "${SONARQUBE_ZIP_URL}" \
&& curl --fail --location --output sonarqube.zip.asc --silent --show-error "${SONARQUBE_ZIP_URL}.asc" \
&& gpg --batch --verify sonarqube.zip.asc sonarqube.zip \
&& unzip -q sonarqube.zip \
&& mv "sonarqube-${SONARQUBE_VERSION}" sonarqube \
&& rm sonarqube.zip* \
&& rm -rf ${SONARQUBE_HOME}/bin/* \
&& chown -R sonarqube:sonarqube ${SONARQUBE_HOME} \
# this 777 will be replaced by 700 at runtime (allows semi-arbitrary "--user" values)
&& chmod -R 777 "${SQ_DATA_DIR}" "${SQ_EXTENSIONS_DIR}" "${SQ_LOGS_DIR}" "${SQ_TEMP_DIR}" \
&& apk del --purge build-dependencies
COPY --chown=sonarqube:sonarqube run.sh sonar.sh ${SONARQUBE_HOME}/bin/
VOLUME ["/opt/sonarqube/data","/opt/sonarqube/extensions","/opt/sonarqube/logs","/opt/sonarqube/temp"]
WORKDIR ${SONARQUBE_HOME}
EXPOSE 9000
#These steps are added to give the desired access to the *sh files to execute in the docker web app service server.
RUN chmod 755 ./bin/run.sh
RUN chmod 755 ./bin/sonar.sh
ENTRYPOINT ["bin/run.sh"]
CMD ["bin/sonar.sh"]
SonarQube server screenshot: (image not included)
According to the documentation: "By default, Azure Container Instances are stateless. If the container crashes or stops, all of its state is lost. To persist state beyond the lifetime of the container, you must mount a volume from an external store."
The documentation explains how to mount an Azure file share in Azure Container Instances.
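A hedged sketch of such a mount with the Azure CLI is shown below; the resource group, storage account, file share, image name and key variable are placeholders, and only SonarQube's data directory is mounted here (the plain az container create flags handle a single Azure file volume; mounting extensions/logs as well would typically be done with a YAML deployment):
az container create \
  --resource-group myResourceGroup \
  --name sonarqube-aci \
  --image myregistry.azurecr.io/sonarqube:custom \
  --ports 9000 \
  --azure-file-volume-account-name mystorageaccount \
  --azure-file-volume-account-key "$STORAGE_KEY" \
  --azure-file-volume-share-name sonarqube-data \
  --azure-file-volume-mount-path /opt/sonarqube/data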
I cross-compiled CUPS 1.7.0 for Sitara ARM Linux 6.
I followed these steps:
./configure --host=arm-linux-gnueabihf --disable-gssapi --prefix=/media/rootfs
make
make install
All CUPS-related files are automatically saved to the SD card,
but typing the cupsd command shows an error and the CUPS server does not start:
cupsd: Child exited on signal 1.
On checking /etc/cups/cupsd.conf, several paths in the configuration file are
/media/rootfs/var/run/cups/cups.sock
instead of
/var/run/cups/cups.sock
1) How do I install this compiled CUPS onto the target board without --prefix?
2) Is there any step missing in the cross compilation?
Any help will be appreciated.
This is the Makefile:
#
# "$Id: Makefile 11107 2013-07-08 13:47:51Z msweet $"
#
# Top-level Makefile for CUPS.
#
# Copyright 2007-2013 by Apple Inc.
# Copyright 1997-2007 by Easy Software Products, all rights reserved.
#
# These coded instructions, statements, and computer programs are the
# property of Apple Inc. and are protected by Federal copyright
# law. Distribution and use rights are outlined in the file "LICENSE.txt"
# which should have been included with this file. If this file is
# file is missing or damaged, see the license at "http://www.cups.org/".
#
include Makedefs
#
# Directories to make...
#
DIRS = cups test $(BUILDDIRS)
#
# Make all targets...
#
all:
chmod +x cups-config
echo Using ARCHFLAGS="$(ARCHFLAGS)"
echo Using ALL_CFLAGS="$(ALL_CFLAGS)"
echo Using ALL_CXXFLAGS="$(ALL_CXXFLAGS)"
echo Using CC="$(CC)"
echo Using CXX="$(CC)"
echo Using DSOFLAGS="$(DSOFLAGS)"
echo Using LDFLAGS="$(LDFLAGS)"
echo Using LIBS="$(LIBS)"
for dir in $(DIRS); do\
echo Making all in $$dir... ;\
(cd $$dir ; $(MAKE) $(MFLAGS) all $(UNITTESTS)) || exit 1;\
done
#
# Make library targets...
#
libs:
echo Using ARCHFLAGS="$(ARCHFLAGS)"
echo Using ALL_CFLAGS="$(ALL_CFLAGS)"
echo Using ALL_CXXFLAGS="$(ALL_CXXFLAGS)"
echo Using CC="$(CC)"
echo Using CXX="$(CC)"
echo Using DSOFLAGS="$(DSOFLAGS)"
echo Using LDFLAGS="$(LDFLAGS)"
echo Using LIBS="$(LIBS)"
for dir in $(DIRS); do\
echo Making libraries in $$dir... ;\
(cd $$dir ; $(MAKE) $(MFLAGS) libs) || exit 1;\
done
#
# Make unit test targets...
#
unittests:
echo Using ARCHFLAGS="$(ARCHFLAGS)"
echo Using ALL_CFLAGS="$(ALL_CFLAGS)"
echo Using ALL_CXXFLAGS="$(ALL_CXXFLAGS)"
echo Using CC="$(CC)"
echo Using CXX="$(CC)"
echo Using DSOFLAGS="$(DSOFLAGS)"
echo Using LDFLAGS="$(LDFLAGS)"
echo Using LIBS="$(LIBS)"
for dir in $(DIRS); do\
echo Making all in $$dir... ;\
(cd $$dir ; $(MAKE) $(MFLAGS) unittests) || exit 1;\
done
#
# Remove object and target files...
#
clean:
for dir in $(DIRS); do\
echo Cleaning in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) clean) || exit 1;\
done
#
# Remove all non-distribution files...
#
distclean: clean
$(RM) Makedefs config.h config.log config.status
$(RM) cups-config
$(RM) conf/cupsd.conf conf/mime.convs conf/pam.std conf/snmp.conf
$(RM) doc/help/ref-cupsd-conf.html doc/help/standard.html doc/index.html
$(RM) man/client.conf.man
$(RM) man/cups-deviced.man man/cups-driverd.man
$(RM) man/cups-lpd.man man/cupsaddsmb.man man/cupsd.man
$(RM) man/cupsd.conf.man man/drv.man man/lpoptions.man
$(RM) packaging/cups.list
$(RM) packaging/cups-desc.plist packaging/cups-info.plist
$(RM) templates/header.tmpl
$(RM) desktop/cups.desktop
$(RM) scheduler/cups.sh scheduler/cups-lpd.xinetd
$(RM) scheduler/org.cups.cups-lpd.plist scheduler/cups.xml
-$(RM) doc/*/index.html
-$(RM) templates/*/header.tmpl
-$(RM) -r autom4te*.cache clang cups/charmaps cups/locale driver/test
#
# Make dependencies
#
depend:
for dir in $(DIRS); do\
echo Making dependencies in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) depend) || exit 1;\
done
#
# Run the clang.llvm.org static code analysis tool on the C sources.
# (at least checker-231 is required for scan-build to work this way)
#
.PHONY: clang clang-changes
clang:
$(RM) -r clang
scan-build -V -k -o `pwd`/clang $(MAKE) $(MFLAGS) clean all
clang-changes:
scan-build -V -k -o `pwd`/clang $(MAKE) $(MFLAGS) all
#
# Generate a ctags file...
#
ctags:
ctags -R .
#
# Install everything...
#
install: install-data install-headers install-libs install-exec
#
# Install data files...
#
install-data:
echo Making all in cups...
(cd cups; $(MAKE) $(MFLAGS) all)
for dir in $(DIRS); do\
echo Installing data files in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) install-data) || exit 1;\
done
echo Installing cups-config script...
$(INSTALL_DIR) -m 755 $(BINDIR)
$(INSTALL_SCRIPT) cups-config $(BINDIR)/cups-config
#
# Install header files...
#
install-headers:
for dir in $(DIRS); do\
echo Installing header files in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) install-headers) || exit 1;\
done
if test "x$(privateinclude)" != x; then \
echo Installing config.h into $(PRIVATEINCLUDE)...; \
$(INSTALL_DIR) -m 755 $(PRIVATEINCLUDE); \
$(INSTALL_DATA) config.h $(PRIVATEINCLUDE)/config.h; \
fi
#
# Install programs...
#
install-exec: all
for dir in $(DIRS); do\
echo Installing programs in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) install-exec) || exit 1;\
done
#
# Install libraries...
#
install-libs: libs
for dir in $(DIRS); do\
echo Installing libraries in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) install-libs) || exit 1;\
done
#
# Uninstall object and target files...
#
uninstall:
for dir in $(DIRS); do\
echo Uninstalling in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) uninstall) || exit 1;\
done
echo Uninstalling cups-config script...
$(RM) $(BINDIR)/cups-config
-$(RMDIR) $(BINDIR)
#
# Run the test suite...
#
test: all unittests
echo Running CUPS test suite...
cd test; ./run-stp-tests.sh
check: all unittests
echo Running CUPS test suite with defaults...
cd test; ./run-stp-tests.sh 1 0 n n
debugcheck: all unittests
echo Running CUPS test suite with debug printfs...
cd test; ./run-stp-tests.sh 1 0 n y
#
# Create HTML documentation using Mini-XML's mxmldoc (http://www.msweet.org/)...
#
apihelp:
for dir in cgi-bin cups filter ppdc scheduler; do\
echo Generating API help in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) apihelp) || exit 1;\
done
framedhelp:
for dir in cgi-bin cups filter ppdc scheduler; do\
echo Generating framed API help in $$dir... ;\
(cd $$dir; $(MAKE) $(MFLAGS) framedhelp) || exit 1;\
done
#
# Create an Xcode docset using Mini-XML's mxmldoc (http://www.msweet.org/)...
#
docset: apihelp
echo Generating docset directory tree...
$(RM) -r org.cups.docset
mkdir -p org.cups.docset/Contents/Resources/Documentation/help
mkdir -p org.cups.docset/Contents/Resources/Documentation/images
cd man; $(MAKE) $(MFLAGS) html
cd doc; $(MAKE) $(MFLAGS) docset
cd cgi-bin; $(MAKE) $(MFLAGS) makedocset
cgi-bin/makedocset org.cups.docset \
`svnversion . | sed -e '1,$$s/[a-zA-Z]//g'` \
doc/help/api-*.tokens
$(RM) doc/help/api-*.tokens
echo Indexing docset...
/Applications/Xcode.app/Contents/Developer/usr/bin/docsetutil index org.cups.docset
echo Generating docset archive and feed...
$(RM) org.cups.docset.atom
/Applications/Xcode.app/Contents/Developer/usr/bin/docsetutil package --output org.cups.docset.xar \
--atom org.cups.docset.atom \
--download-url http://www.cups.org/org.cups.docset.xar \
org.cups.docset
#
# Lines of code computation...
#
sloc:
for dir in cups scheduler; do \
(cd $$dir; $(MAKE) $(MFLAGS) sloc) || exit 1;\
done
#
# Make software distributions using EPM (http://www.msweet.org/)...
#
EPMFLAGS = -v --output-dir dist $(EPMARCH)
aix bsd deb depot inst pkg setld slackware swinstall tardist:
epm $(EPMFLAGS) -f $@ cups packaging/cups.list
epm:
epm $(EPMFLAGS) -s packaging/installer.gif cups packaging/cups.list
rpm:
epm $(EPMFLAGS) -f rpm -s packaging/installer.gif cups packaging/cups.list
.PHONEY: dist
dist: all
$(RM) -r dist
$(MAKE) $(MFLAGS) epm
case `uname` in \
*BSD*) $(MAKE) $(MFLAGS) bsd;; \
Darwin*) $(MAKE) $(MFLAGS) osx;; \
Linux*) test ! -x /usr/bin/rpm || $(MAKE) $(MFLAGS) rpm;; \
SunOS*) $(MAKE) $(MFLAGS) pkg;; \
esac
#
# Don't run top-level build targets in parallel...
#
.NOTPARALLEL:
#
# End of "$Id: Makefile 11107 2013-07-08 13:47:51Z msweet $".
#
Passing DSTROOT=/media/rootfs to make install is the solution for CUPS:
./configure --host=arm-linux-gnueabihf --disable-gssapi --libdir=/usr/lib
make
sudo make install DSTROOT=/media/rootfs
Don't use --prefix.
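As a hedged sanity check after staging with DSTROOT, the generated configuration should not contain the staging prefix (the path below assumes the same DSTROOT as above):
# Should print nothing if no staging paths were baked into the config files
grep -rn '/media/rootfs' /media/rootfs/etc/cups/ 2>/dev/null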
cupsd: Child exited on signal 1.
This error is due to hardcoded paths. You can avoid it by placing the installed executables in the same directory on the target, by creating that directory in your rootfs.
There are two ways to avoid the above. 1) While configuring the source code, give --prefix some other folder such as $HOME/cups so that the build is standalone; then create a directory with the same path ($HOME/cups, matching the prefix) in your rootfs and copy the standalone executables there.
For example, with --prefix=/home/vinay/cups, create the same directory path (/home/vinay/cups) in your rootfs and copy all your executables there, as shown in the sketch below.
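A hedged sketch of that first approach, assuming the same /home/vinay/cups prefix and the target rootfs mounted at /media/rootfs:
./configure --host=arm-linux-gnueabihf --disable-gssapi --prefix=/home/vinay/cups
make
make install
# Mirror the exact prefix path onto the rootfs so the paths baked into
# cupsd.conf still resolve on the board.
mkdir -p /media/rootfs/home/vinay/cups
cp -a /home/vinay/cups/. /media/rootfs/home/vinay/cups/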
For the second way, I am not sure whether it will work or not; I tried it for a Qt project and it worked, by using a sysroot.
2) Provide the options --sysroot=/media/rootfs --prefix=/media/rootfs so that, when running, your executables search the correct paths with the help of the sysroot.
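The exact invocation is not shown above, and configure scripts do not usually accept a --sysroot option directly; one common way to plumb a sysroot through an autoconf build is via the compiler and linker flags. A hedged sketch with the same directories as above:
./configure --host=arm-linux-gnueabihf --disable-gssapi \
    --prefix=/media/rootfs \
    CFLAGS="--sysroot=/media/rootfs" \
    LDFLAGS="--sysroot=/media/rootfs"
make
make install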
I quite often have some very long console commands like:
python /var/www/closure-library/closure/bin/calcdeps.py \
-i myJSFile.js \
-p ../closure-library/closure/goog/ \
-o compiled \
-c /var/www//closure-compiler/build/compiler.jar \
-f "--compilation_level=ADVANCED_OPTIMIZATIONS" \
-f "--define=goog.LOCALE='de'" > myOutputFile.js
and I would like to use simply:
closure -i myJSFile.js -o myOutputFile.js
or something similar. How can I do this?
Look up aliases in your shell's manpage.
Perhaps something like:
alias closure='python /var/www/closure-library/closure/bin/calcdeps.py -p ../closure-library/closure/goog/ -c /var/www//closure-compiler/build/compiler.jar -f "--compilation_level=ADVANCED_OPTIMIZATIONS" -o compiled'
Then you could do
$ closure -i myJSFile.js > myOutputFile.js
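To make the alias persistent across sessions, it would typically go into your shell's startup file (assuming bash here):
# add the alias line above to ~/.bashrc, then reload it in the current shell
source ~/.bashrc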
You can also write a script and handle the -o myOutputFile.js option.
#!/bin/bash
if [ $# -ne 2 ]; then
echo "Usage: closure InputFile OutputFile"
exit 1
fi
python /var/www/closure-library/closure/bin/calcdeps.py \
-i "$1" \
-p ../closure-library/closure/goog/ \
-o compiled \
-c /var/www//closure-compiler/build/compiler.jar \
-f "--compilation_level=ADVANCED_OPTIMIZATIONS" \
-f "--define=goog.LOCALE='de'" > "$2"
And then you could run: closure myJSFile.js myOutputFile.js
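For that to work, the script needs to be executable and somewhere on your PATH; a small hedged sketch, assuming it is saved as ~/bin/closure and ~/bin is on your PATH:
chmod +x ~/bin/closure
hash -r   # make the new command visible in the current bash session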
So I want to download multiple files from RapidShare. This is what I currently have. I created a cookie by running:
wget \
--save-cookies ~/.cookies/rapidshare \
--post-data "login=USERNAME&password=PASSWORD" \
--no-check-certificate \
-O - \
https://ssl.rapidshare.com/cgi-bin/premiumzone.cgi \
> /dev/null
and now I have a shell script that I run, which looks like this:
#!/bin/bash
wget -c --load-cookies ~/.cookies/rapidshare http://rapidshare.com/files/219920856/file1.rar
wget -c --load-cookies ~/.cookies/rapidshare http://rapidshare.com/files/393839302/file2.rar
wget -c --load-cookies ~/.cookies/rapidshare http://rapidshare.com/files/398293204/file3.rar
....
I want two things:
The shell script needs to read the files to download from a file.
The shell script should download anywhere from 2 - 8 files at a time.
Thanks!
When you want parallel jobs, think make.
#!/usr/bin/make -f
login:
wget -qO/dev/null \
--save-cookies ~/.cookies/rapidshare \
--post-data "login=USERNAME&password=PASSWORD" \
--no-check-certificate \
https://ssl.rapidshare.com/cgi-bin/premiumzone.cgi
$(MAKEFILES):
%: login
wget -ca$(addsuffix .log,$(notdir $@)) \
--load-cookies ~/.cookies/rapidshare $@
@echo "Downloaded $@ (log in $(addsuffix .log,$(notdir $@)))"
Save this as rsget somewhere in $PATH (make sure you use tabs and not spaces for indentation), give it chmod +x, and run
rsget -kj8 \
http://rapidshare.com/files/219920856/file1.rar \
http://rapidshare.com/files/393839302/file2.rar \
http://rapidshare.com/files/398293204/file3.rar \
...
This will log in, then wget each target. -j8 tells make to run up to 8 jobs in parallel, and -k means "keep going even if a target returned failure".
Edit
Tested with GNU Make 3.79 and 3.81.
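To cover the first requirement (reading the downloads from a file), the targets can be fed from a list file instead of being typed out; a hedged example, assuming urls.txt contains one URL per line with no spaces:
rsget -kj8 $(cat urls.txt)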
Try this. I think it should do what you want:
#! /bin/bash
MAX_CONCURRENT=8
URL_BASE="http://rapidshare.com/files/"
cookie_file=~/.cookies/rapidshare
# do your login thing here...
[ -n "$1" -a -f "$1" ] || { echo "please provide a file containing the stuff to download"; exit 1; }
inputfile=$1
count=0
while read x; do
if [ $count -ge $MAX_CONCURRENT ]; then
count=0
wait
fi
{ wget -c --load-cookies "$cookie_file" "${URL_BASE}$x" && echo "Downloaded $x"; } &
count=$((count + 1))
done < $inputfile
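A hedged usage sketch, assuming the script above is saved as rsdownload.sh and the input file lists one path suffix per line (e.g. 219920856/file1.rar, to match URL_BASE):
chmod +x rsdownload.sh
./rsdownload.sh downloads.txt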