name : oem-getlogs
#!/usr/bin/python3

from apport import hookutils

from glob import glob
from io import BytesIO
from problem_report import CompressedValue
import apport
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import zipfile

opt_debug = False


# Apport helper routines
def debug(text):
    if opt_debug:
        print("%s\n" % (text))


def attach_command_output(report, command_list, key):
    debug("%s" % (' '.join(command_list)))
    log = hookutils.command_output(command_list)
    if not log or log[:5] == "Error":
        return
    report[key] = log


def attach_pathglob_as_zip(report, pathglob, key, data_filter=None, type="b"):
    """Use zip file here because tarfile module in linux can't
       properly handle file size 0 with content in /sys directory like
       edid file. zipfile module works fine here. So we use it.

       type:
            a: for ascii  type of data
            b: for binary type of data
       """
    if data_filter is None:
        data_filter = lambda x: x
    filelist = []
    for pg in pathglob:
        for file in glob(pg):
            filelist.append(file)

    zipf = BytesIO()
    with zipfile.ZipFile(zipf, mode='w', compression=zipfile.ZIP_DEFLATED) as \
            zipobj:
        for f in filelist:
            if opt_debug:
                print(key, f)
            if not os.path.isfile(f):
                if opt_debug:
                    print(f, "is not a file")
                continue
            if type == "a":
                with open(f) as f_fd:
                    data = f_fd.read()
                    zipobj.writestr(f, data_filter(data))
            else:
                zipobj.write(f)
    cvalue = CompressedValue()
    cvalue.set_value(zipf.getbuffer())
    report[key + ".zip"] = cvalue


def attach_nvidia_debug_logs(report, keep_locale=False):
    # check if nvidia-bug-report.sh exists
    nv_debug_command = 'nvidia-bug-report.sh'

    if shutil.which(nv_debug_command) is None:
        if opt_debug:
            print(nv_debug_command, "does not exist.")
        return

    env = os.environ.copy()
    if not keep_locale:
        env['LC_MESSAGES'] = 'C'

    # output result to temp directory
    nv_tempdir = tempfile.mkdtemp()
    nv_debug_file = 'nvidia-bug-report'
    nv_debug_fullfile = os.path.join(nv_tempdir, nv_debug_file)
    nv_debug_cmd = [nv_debug_command, '--output-file', nv_debug_fullfile]
    try:
        with open(os.devnull, 'w') as devnull:
            subprocess.run(nv_debug_cmd, env=env, stdout=devnull,
                           stderr=devnull)
            nv_debug_fullfile_gz = nv_debug_fullfile + ".gz"
            hookutils.attach_file_if_exists(report, nv_debug_fullfile_gz,
                                            'nvidia-bug-report.gz')
            os.unlink(nv_debug_fullfile_gz)
            os.rmdir(nv_tempdir)
    except OSError as e:
        print("Error:", str(e))
        print("Fail on cleanup", nv_tempdir, ". Please file a bug for it.")


def dot():
    print(".", end="", flush=True)


def build_packages():
    # related packages
    packages = ['apt', 'grub2']

    # display
    packages.append('xorg')
    packages.append('gnome-shell')

    # audio
    packages.append('alsa-base')

    # hotkey and hotplugs
    packages.append('udev')

    # networking issues
    packages.append('network-manager')

    return packages


def helper_url_credential_filter(string_with_urls):
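    # Illustrative example (hypothetical URL): a sources.list entry such as
    #   deb https://myuser:s3cret@ppa.example.com/ubuntu focal main
    # would be rewritten to
    #   deb https://USER:SECRET@ppa.example.com/ubuntu focal main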
    return re.sub(r"://\w+?:\w+?@", "://USER:SECRET@", string_with_urls)


def add_info(report):
    # Check whether the DCD file exists in the installer.
    attach_command_output(report, ['ubuntu-report', 'show'], 'UbuntuReport')
    dot()
    hookutils.attach_file_if_exists(report, '/etc/buildstamp', 'BuildStamp')
    dot()
    attach_pathglob_as_zip(report,
                           ['/sys/firmware/acpi/tables/*',
                            '/sys/firmware/acpi/tables/*/*'],
                           "acpitables")
    dot()

    # Basic hardware information
    hookutils.attach_hardware(report)
    dot()
    hookutils.attach_wifi(report)
    dot()

    hwe_system_commands = {'lspci--xxxx': ['lspci', '-xxxx'],
                           'lshw.json': ['lshw', '-json', '-numeric'],
                           'dmidecode': ['dmidecode'],
                           'fwupdmgr_get-devices': ['fwupdmgr', 'get-devices',
                                                    '--show-all-devices',
                                                    '--no-unreported-check'],
                           'boltctl-list': ['boltctl', 'list'],
                           'mokutil---sb-state': ['mokutil', '--sb-state'],
                           'tlp-stat': ['tlp-stat']
                           }
    for name in hwe_system_commands:
        attach_command_output(report, hwe_system_commands[name], name)
        dot()

    # More audio related
    hookutils.attach_alsa(report)
    dot()
    audio_system_commands = {'pactl-list': ['pactl', 'list'],
                             'aplay-l': ['aplay', '-l'],
                             'aplay-L': ['aplay', '-L'],
                             'arecord-l': ['arecord', '-l'],
                             'arecord-L': ['arecord', '-L']
                             }
    for name in audio_system_commands:
        attach_command_output(report, audio_system_commands[name], name)
        dot()
    attach_pathglob_as_zip(report, ['/usr/share/alsa/ucm/*/*'], "ALSA-UCM")
    dot()

    # FIXME: should be included in xorg in the future
    gfx_system_commands = {'glxinfo': ['glxinfo'],
                           'xrandr': ['xrandr'],
                           'xinput': ['xinput']
                           }
    for name in gfx_system_commands:
        attach_command_output(report, gfx_system_commands[name], name)
        dot()
    attach_pathglob_as_zip(report, ['/sys/devices/*/*/drm/card?/*/edid'],
                           "EDID")
    dot()

    # nvidia-bug-reports.sh
    attach_nvidia_debug_logs(report)
    dot()

    # FIXME: should be included in thermald in the future
    attach_pathglob_as_zip(report,
                           ["/etc/thermald/*",
                            "/sys/devices/virtual/thermal/*",
                            "/sys/class/thermal/*"], "THERMALD")
    dot()

    # all kernel and system messages
    attach_pathglob_as_zip(report, ["/var/log/*", "/var/log/*/*"], "VAR_LOG")
    dot()

    # apt configs
    attach_pathglob_as_zip(report, [
        "/etc/apt/apt.conf.d/*",
        "/etc/apt/sources.list",
        "/etc/apt/sources.list.d/*.list",
        "/etc/apt/preferences.d/*"], "APT_CONFIGS",
        type="a", data_filter=helper_url_credential_filter)
    dot()

    # TODO: debug information for suspend or hibernate

    # packages installed.
    attach_command_output(report, ['dpkg', '-l'], 'dpkg-l')
    dot()

    # FIXME: should be included in bluez in the future
    attach_command_output(report, ['hciconfig', '-a'], 'hciconfig-a')
    dot()

    # FIXME: should be included in dkms in the future
    attach_command_output(report, ['dkms', 'status'], 'dkms_status')
    dot()

    # enable when the feature to include data from package hooks exists.
    # packages = build_packages()
    # attach_related_packages(report, packages)


if __name__ == '__main__':
    from argparse import ArgumentParser
    import gzip

    parser = ArgumentParser(prog="oem-getlogs",
                            usage="Useage: sudo -E oem-getlogs [-c CASE_ID]",
                            description="Get Hardware Enablement related logs")
    parser.add_argument("-c", "--case-id", help="optional CASE_ID", dest="cid",
                        default="")
    args = parser.parse_args()

    # check if we got root permission
    if os.geteuid() != 0:
        print("Error: you need to run this program as root")
        parser.print_help()
        sys.exit(1)

    print("Start to collect logs: ", end="", flush=True)
    # create report
    report = apport.Report()
    add_info(report)

    # generate filename
    hostname = os.uname()[1]
    date_time = time.strftime("%Y%m%d%H%M%S%z", time.localtime())
    filename_lst = ["oemlogs", hostname]
    if args.cid:
        filename_lst.append(args.cid)
    filename_lst.append(date_time + ".apport.gz")
    filename = "-".join(filename_lst)

    with gzip.open(filename, 'wb') as f:
        report.write(f)
    print("\nSaved log to", filename)
    print("The owner of the file is root. You might want to")
    print("    chown [user].[group]", filename)