author     alcoheca <alcoheca@svn>    2010-03-26 23:42:37 +0000
committer  alcoheca <alcoheca@svn>    2010-03-26 23:42:37 +0000
commit     af8cb2a400e9caacea314b8db33d32c1b4dadd82 (patch)
tree       ebf5b01882ec1f80179a4dc386bde8c047d5f8a0
parent     6d3222ee864c9e94c2c25a4f44b572e3eea9b5b8 (diff)
Revert "wip"
This reverts commit 0e8d33e10e4d1649827865b3aec4bb5231666f88.
git-svn-id: https://xbmc.svn.sourceforge.net/svnroot/xbmc/trunk@28883 568bbfeb-2a22-0410-94d2-cc84cf5bfa90
33 files changed, 540 insertions, 16726 deletions
diff --git a/Makefile.in b/Makefile.in
index 11771e6e4f..1384045e31 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -90,7 +90,6 @@ endif
 LIB_DIRS=\
 	xbmc/lib/cximage-6.0 \
-	xbmc/lib/libcpluff \
 	xbmc/lib/libexif \
 	xbmc/lib/libhdhomerun \
 	xbmc/lib/libid3tag \
@@ -360,11 +359,9 @@ ifeq ($(or $(findstring powerpc,$(ARCH)),$(findstring x86_64-linux,$(ARCH))),)
 endif
 imagelib: dllloader
 	$(MAKE) -C xbmc/lib/cximage-6.0
-libcpluff: dllloader
-	$(MAKE) -C xbmc/lib/libcpluff
 codecs: papcodecs dvdpcodecs
-libs: libcpluff libhdhomerun libid3tag imagelib libexif python
+libs: libhdhomerun libid3tag imagelib libexif python
 externals: codecs libs python visualizations screensavers
 xcode_depends: \
diff --git a/addons/cn.1ting.scraper/1ting.xml b/addons/com.1ting.scraper/1ting.xml
index 050e8170dd..050e8170dd 100644
--- a/addons/cn.1ting.scraper/1ting.xml
+++ b/addons/com.1ting.scraper/1ting.xml
diff --git a/addons/cn.1ting.scraper/default.tbn b/addons/com.1ting.scraper/default.tbn
Binary files differ
index 2c4c603e6f..2c4c603e6f 100644
--- a/addons/cn.1ting.scraper/default.tbn
+++ b/addons/com.1ting.scraper/default.tbn
diff --git a/addons/cn.1ting.scraper/description.xml b/addons/com.1ting.scraper/description.xml
index 906f847b5d..906f847b5d 100644
--- a/addons/cn.1ting.scraper/description.xml
+++ b/addons/com.1ting.scraper/description.xml
diff --git a/addons/org.xbmc.addons.dxspectrum/addon.xml b/addons/org.xbmc.addons.dxspectrum/addon.xml
deleted file mode 100644
index 33ca2f446e..0000000000
--- a/addons/org.xbmc.addons.dxspectrum/addon.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<addoninfo>
- <id>vis.dxspectrum</id>
- <type>visualization</type>
- <title>DirectX Spectrum</title>
- <librarywin32>Spectrum_win32dx.vis</librarywin32>
- <version>1.0.0</version>
- <platforms>
- <platform>windows</platform>
- </platforms>
- <minversion>
- <xbmc>28000</xbmc>
- </minversion>
- <summary>DirectX Visualization</summary>
- <description>Visualisation showing a rotating 3D Spectrum Analyzer</description>
- <author>TEAMXBMC</author>
-</addoninfo>
diff --git a/addons/org.xbmc.addons.glspectrum/addon.xml b/addons/org.xbmc.addons.glspectrum/addon.xml
deleted file mode 100644
index 01a2fc9f9a..0000000000
--- a/addons/org.xbmc.addons.glspectrum/addon.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<addon
- id="org.xbmc.addons.glspectrum"
- version="1.0.0"
- name="OpenGL Spectrum"
- summary="Visualisation showing a rotating 3D Spectrum Analyzer"
- provider-name="TEAMXBMC">
- <requires>
- <import addon="org.xbmc.core" version="28000"/>
- </requires>
- <extension
- point="org.xbmc.player.viz"
- name="OpenGL Spectrum"
- visualization="glspectrum"/>
-</addon>
diff --git a/addons/org.xbmc.addons.glspectrum/opengl_spectrum.vis b/addons/org.xbmc.addons.glspectrum/opengl_spectrum.vis
Binary files differ
deleted file mode 100755
index 58b3e0ef53..0000000000
--- a/addons/org.xbmc.addons.glspectrum/opengl_spectrum.vis
+++ /dev/null
diff --git a/addons/org.xbmc.addons.waveform/Waveform.vis b/addons/org.xbmc.addons.waveform/Waveform.vis
Binary files differ
deleted file mode 100755
index e0d3c9efb7..0000000000
--- a/addons/org.xbmc.addons.waveform/Waveform.vis
+++ /dev/null
diff --git a/addons/org.xbmc.core.vfs/core.c b/addons/org.xbmc.core.vfs/core.c
deleted file mode 100644
index 124de7babe..0000000000
--- a/addons/org.xbmc.core.vfs/core.c
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
-* Copyright (C) 2010 Team XBMC
-* http://www.xbmc.org
-*
-* This Program is free software;
you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation; either version 2, or (at your option) -* any later version. -* -* This Program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -* You should have received a copy of the GNU General Public License -* along with XBMC; see the file COPYING. If not, write to -* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. -* http://www.gnu.org/copyleft/gpl.html -* -*/ - -#include "core.h" - -/* ------------------------------------------------------------------------ - * Data types - * ----------------------------------------------------------------------*/ - - -/** Type for xbmc_vfs_ops structure */ -typedef struct xbmc_vfs_ops *vfsprovider_t; - -/** Plug-in instance data */ -struct plugin_data_t { - - /** The plug-in context */ - cp_context_t *ctx; - - /** Number of registered providers */ - int num_providers; - - /** An array of registered file system providers */ - registered_vfsprovider_t *providers; -}; - -typedef struct plugin_data_t plugin_data_t; - -/** Registered vfs addon info */ -struct registered_vfsprovider_t { - const char *protocol; - xbmc_vfs_ops *ops; -}; - - -/* ------------------------------------------------------------------------ - * Internal functions - * ----------------------------------------------------------------------*/ - -/** - * A run function for the core plug-in. In this case this function acts as - * the application main function so there is no need for us to split the - * execution into small steps. Rather, we execute the whole main loop at - * once to make it simpler. - */ -static int run(void *d) { - plugin_data_t *data = d; - char **argv; - int argc; - int i; - - // Go through all files listed as command arguments - for (i = 1; argv[i] != NULL; i++) { - int j; - int classified = 0; - - // Print file name - printf("%s: ", argv[i]); - - // Try providers in order of descending priority - for (j = 0; !classified && j < data->num_providers; j++) { - vfsprovider_t *cl - = data->providers[j].provider; - - classified = cl->direxists(cl->data, argv[i]); - } - - // Check if unknown file - if (!classified) { - fputs("unknown file type\n", stdout); - } - } - - // All done - return 0; -} - -/** - * Creates a new plug-in instance. - */ -static void *create(cp_context_t *ctx) { - plugin_data_t *data = malloc(sizeof(plugin_data_t)); - if (data != NULL) { - data->ctx = ctx; - data->num_providers = 0; - data->providers = NULL; - } else { - cp_log(ctx, CP_LOG_ERROR, - "Insufficient memory for plug-in data."); - } - return data; -} - -/** - * Initializes and starts the plug-in. 
- */ -static int start(void *d) { - plugin_data_t *data = d; - cp_extension_t **pr_exts; - int num_pr_exts; - cp_status_t status; - int i; - - // Obtain list of registered file system providers - pr_exts = cp_get_extensions_info( - data->ctx, - "org.xbmc.vfs.providers", - &status, - &num_pr_exts - ); - if (pr_exts == NULL) { - - // An error occurred and framework logged it - return status; - } - - // Allocate memory for vfsprovider information, if any - if (num_pr_exts > 0) { - data->providers = malloc( - num_pr_exts * sizeof(registered_vfsprovider_t) - ); - if (data->providers == NULL) { - // Memory allocation failed - cp_log(data->ctx, CP_LOG_ERROR, - "Insufficient memory for providers list."); - return CP_ERR_RESOURCE; - } - } - - /* Resolve providers functions. This will implicitly start - * plug-ins providing the file systems. */ - for (i = 0; i < num_pr_exts; i++) { - const char *str; - vfsprovider_t *pr; - - // Resolve provider data pointer - str = cp_lookup_cfg_value( - pr_exts[i]->configuration, "@provider"); - if (str == NULL) { - - // Provider symbol name is missing - cp_log(data->ctx, CP_LOG_ERROR, - "Ignoring vfsprovider without symbol name."); - continue; - } - pr = cp_resolve_symbol( - data->ctx, - pr_exts[i]->plugin->identifier, - str, - NULL - ); - if (pr == NULL) { - - // Could not resolve provider symbol - cp_log(data->ctx, CP_LOG_ERROR, - "Ignoring provider which could not be resolved."); - continue; - } - - // Get the protocol supported - str = cp_lookup_cfg_value( - pr_exts[i]->configuration, "@protocol" - ); - if (str == NULL) { - - // provider is missing mandatory protocol - cp_log(data->ctx, CP_LOG_ERROR, - "Ignoring provider without protocol."); - continue; - - // Add provider to the list of registered providers - data->providers[data->num_providers].protocol = str; - data->providers[data->num_providers].provider = pr; - data->num_providers++; - } - } - - // Release extension information - cp_release_info(data->ctx, pr_exts); - - // Register run function to do the real work - cp_run_function(data->ctx, run); - - if (data->num_providers) { - cp_log(data->ctx, CP_LOG_DEBUG, - "VFS: NO providers %i"); - } else { - cp_log(data->ctx, CP_LOG_DEBUG, - "VFS: some providers %i"); - } - - // Successfully started - return CP_OK; -} - -/** - * Releases resources from other plug-ins. - */ -static void stop(void *d) { - plugin_data_t *data = d; - int i; - - if (data->providers != NULL) { - for (i = 0; i < data->num_providers; i++) { - cp_release_symbol( - data->ctx, data->providers[i].provider - ); - } - - // Free local data - free(data->providers); - data->providers = NULL; - data->num_providers = 0; - } -} - -/** - * Destroys a plug-in instance. - */ -static void destroy(void *d) { - free(d); -} - - -/* ------------------------------------------------------------------------ - * Exported runtime information - * ----------------------------------------------------------------------*/ - -/** - * Plug-in runtime information for the framework. The name of this symbol - * is stored in the plug-in descriptor. 
- */ -CP_EXPORT cp_plugin_runtime_t xbmc_vfs_providers_core_funcs = { - create, - start, - stop, - destroy -}; diff --git a/addons/org.xbmc.core.vfs/core.h b/addons/org.xbmc.core.vfs/core.h deleted file mode 100644 index 7df8b28a8b..0000000000 --- a/addons/org.xbmc.core.vfs/core.h +++ /dev/null @@ -1,128 +0,0 @@ -/* -* Copyright (C) 2010 Team XBMC -* http://www.xbmc.org -* -* This Program is free software; you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation; either version 2, or (at your option) -* any later version. -* -* This Program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -* You should have received a copy of the GNU General Public License -* along with XBMC; see the file COPYING. If not, write to -* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. -* http://www.gnu.org/copyleft/gpl.html -* -*/ - -#ifndef VFSPROVIDER_H -#define VFSPROVIDER_H - -#include <fcntl.h> -#include <sys/stat.h> -#include <sys/types.h> -#include <sys/statvfs.h> - -#ifdef __cplusplus -extern "C" { -#endif - -/** Function to add an entry in a readdir() operation - * - * @param buf the buffer passed to the readdir() operation - * @param name the file name of the directory entry - * @param stat file attributes, can be NULL - * @param off offset of the next entry or zero - * @return 1 if buffer is full, zero otherwise - */ -typedef int (*xbmc_vfs_fill_dir_t) (void *buf, const char *name, - const struct stat *stbuf, off_t off); - -/** - * VFS operations: - * - */ -struct xbmc_vfs_operations { - - /** Provider specific runtime data */ - void *data; - - /** Get file attributes */ - int (*stat) (const char *, struct stat *); - - /** Create a directory */ - int (*mkdir) (const char *, mode_t); - - /** Remove a directory */ - int (*rmdir) (const char *); - - /** Rename a file */ - int (*rename) (const char *, const char *); - - /** Change the size of a file */ - int (*truncate) (const char *, off_t); - - /** File open operation */ - int (*open) (const char *, struct fuse_file_info *); - - /** Read data from an open file */ - int (*read) (const char *, char *, size_t, off_t, - struct fuse_file_info *); - - /** Check file access permissions */ - int (*access) (const char *, int); - - /** Write data to an open file */ - int (*write) (const char *, const char *, size_t, off_t, - struct fuse_file_info *); - - /** Create and open a file */ - int (*create) (const char *, mode_t, struct fuse_file_info *); - - /** Change the size of an open file */ - int (*ftruncate) (const char *, off_t, struct fuse_file_info *); - - /** Get attributes from an open file */ - int (*fgetattr) (const char *, struct stat *, struct fuse_file_info *); - - /** Get file system statistics */ - int (*statfs) (const char *, struct statvfs *); - - /** Possibly flush cached data */ - int (*flush) (const char *, struct fuse_file_info *); - - /** Release an open file */ - int (*release) (const char *, struct fuse_file_info *); - - /** Synchronize file contents */ - int (*fsync) (const char *, int, struct fuse_file_info *); - - /** Set extended attributes */ - int (*setxattr) (const char *, const char *, const char *, size_t, int); - - /** Get extended attributes */ - int (*getxattr) (const char *, const char *, char *, size_t); - - /** List extended attributes */ - 
int (*listxattr) (const char *, char *, size_t); - - /** Remove extended attributes */ - int (*removexattr) (const char *, const char *); - - /** Open directory */ - int (*opendir) (const char *, struct fuse_file_info *); - - /** Read directory */ - int (*readdir) (const char *, void *, fuse_fill_dir_t, off_t, - struct fuse_file_info *); - - /** Release directory */ - int (*releasedir) (const char *, struct fuse_file_info *); - -}; - -#endif /*VFSPRIVIDER_H*/ diff --git a/addons/org.xbmc.core.vfs/plugin.xml b/addons/org.xbmc.core.vfs/plugin.xml deleted file mode 100644 index 3f9f311f39..0000000000 --- a/addons/org.xbmc.core.vfs/plugin.xml +++ /dev/null @@ -1,15 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<plugin - id="org.xbmc.core.vfs" - version="0.1" - name="Core logic for vfsprovider" - provider-name="TEAMXBMC"> - <requires> - <c-pluff version="0.1"/> - </requires> - <runtime library="libxbmcvfs" funcs="xbmc_vfs_core_funcs"/> - <extension-point - id="provider" - name="File system provider" - schema="vfsprovider.xsd"/> -</plugin> diff --git a/addons/org.xbmc.core.vfs/vfsprovider.xsd b/addons/org.xbmc.core.vfs/vfsprovider.xsd deleted file mode 100644 index d51d3d7bf7..0000000000 --- a/addons/org.xbmc.core.vfs/vfsprovider.xsd +++ /dev/null @@ -1,27 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE schema PUBLIC "-//W3C//DTD XMLSCHEMA 200102//EN" "http://www.w3.org/2001/XMLSchema.dtd"> -<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"> - <xs:element name="extension"> - <xs:complexType> - <xs:attribute name="point" type="xs:string" use="required"/> - <xs:attribute name="id" type="simpleIdentifier"/> - <xs:attribute name="name" type="xs:string"/> - - <!-- - * The provider is the name of a symbol pointing to - * vfsprovider_t strucutre. Protocol determines the order - * the file classifiers are tried. Cached states whether XBMC - * should cache the file access. 
- --> - <xs:attribute name="provider" type="xs:string" use="required"/> - <xs:attribute name="protocol" type="xs:string" use="required"/> - <xs:attribute name="cached" type="xs:boolean" use="required"/> - - </xs:complexType> - </xs:element> - <xs:simpleType name="simpleIdentifier"> - <xs:restriction base="xs:string"> - <xs:pattern value="[^.]+"/> - </xs:restriction> - </xs:simpleType> -</xs:schema> diff --git a/configure.in b/configure.in index df090355dd..1a0b91ccb2 100644 --- a/configure.in +++ b/configure.in @@ -1032,7 +1032,6 @@ OUTPUT_FILES="Makefile \ guilib/Makefile \ guilib/common/Makefile \ xbmc/lib/libass/xbmc/Makefile \ - xbmc/lib/libcpluff/Makefile \ xbmc/lib/libXBMS/Makefile \ xbmc/lib/libRTV/Makefile \ xbmc/lib/libexif/Makefile \ diff --git a/language/English/strings.xml b/language/English/strings.xml index 1b8f996b29..5712e44538 100644 --- a/language/English/strings.xml +++ b/language/English/strings.xml @@ -2112,7 +2112,6 @@ <string id="24008">Screensaver</string> <string id="24009">Script</string> <string id="24010">Visualization</string> - <string id="24011">File system</string> <string id="24020">Configure Add-on</string> <string id="24021">Disable Add-on</string> diff --git a/xbmc/FileSystem/FactoryDirectory.cpp b/xbmc/FileSystem/FactoryDirectory.cpp index 07f302ac06..3917c00c67 100644 --- a/xbmc/FileSystem/FactoryDirectory.cpp +++ b/xbmc/FileSystem/FactoryDirectory.cpp @@ -34,7 +34,6 @@ #include "MusicDatabaseDirectory.h" #include "MusicSearchDirectory.h" #include "VideoDatabaseDirectory.h" -#include "AddonDirectory.h" #include "AddonsDirectory.h" #include "ShoutcastDirectory.h" #include "LastFMDirectory.h" @@ -193,6 +192,7 @@ IDirectory* CFactoryDirectory::Create(const CStdString& strPath) #endif } - return CAddonDirectory::GetDirectory(strProtocol); + CLog::Log(LOGWARNING, "CFactoryDirectory::Create - Unsupported protocol %s", strProtocol.c_str()); + return NULL; } diff --git a/xbmc/GUIWindowAddonBrowser.cpp b/xbmc/GUIWindowAddonBrowser.cpp index 94f27f9c00..6b80ad8370 100644 --- a/xbmc/GUIWindowAddonBrowser.cpp +++ b/xbmc/GUIWindowAddonBrowser.cpp @@ -168,6 +168,7 @@ void CGUIWindowAddonBrowser::Update() pItem->SetProperty("Addon.Name", addon->Name()); pItem->SetProperty("Addon.Version", addon->Version().Print()); pItem->SetProperty("Addon.Summary", addon->Summary()); + pItem->SetProperty("Addon.Description", addon->Description()); pItem->SetProperty("Addon.Creator", addon->Author()); pItem->SetProperty("Addon.Disclaimer", addon->Disclaimer()); pItem->SetProperty("Addon.Rating", addon->Stars()); diff --git a/xbmc/addons/Addon.cpp b/xbmc/addons/Addon.cpp index 40e51ffedf..327451e748 100644 --- a/xbmc/addons/Addon.cpp +++ b/xbmc/addons/Addon.cpp @@ -103,51 +103,39 @@ const CStdString TranslateType(const ADDON::TYPE &type, bool pretty/*=false*/) { if (pretty) return g_localizeStrings.Get(24007); - return "org.xbmc.library.scraper"; + return "scraper"; } case ADDON::ADDON_SCRAPER_LIBRARY: { - return "org.xbmc.library.scraper.include"; + return "scraper-library"; } case ADDON::ADDON_SCREENSAVER: { if (pretty) return g_localizeStrings.Get(24008); - return "org.xbmc.ui.screensaver"; + return "screensaver"; } case ADDON::ADDON_VIZ: { if (pretty) return g_localizeStrings.Get(24010); - return "org.xbmc.player.viz"; + return "visualization"; } case ADDON::ADDON_VIZ_LIBRARY: { - return "org.xbmc.player.viz.presetpack"; + return "visualization-library"; } case ADDON::ADDON_PLUGIN: { if (pretty) return g_localizeStrings.Get(24005); - return "org.xbmc.library.plugin"; + return 
"plugin"; } case ADDON::ADDON_SCRIPT: { if (pretty) return g_localizeStrings.Get(24009); - return "org.xbmc.ui.applets"; - } - case ADDON::ADDON_SKIN: - { - if (pretty) - return g_localizeStrings.Get(24009); - return "org.xbmc.ui.skin"; - } - case ADDON::ADDON_VFSDLL: - { - if (pretty) - return g_localizeStrings.Get(24011); - return "org.xbmc.core.vfs.provider"; + return "script"; } default: { diff --git a/xbmc/addons/AddonManager.cpp b/xbmc/addons/AddonManager.cpp index de4662f7e5..688e148922 100644 --- a/xbmc/addons/AddonManager.cpp +++ b/xbmc/addons/AddonManager.cpp @@ -27,8 +27,6 @@ #include "Settings.h" #include "GUISettings.h" #include "DownloadQueueManager.h" -//#include "AdvancedSettings.h" -#include "DllLibCPluff.h" #include "log.h" #ifdef HAS_VISUALISATION @@ -43,6 +41,7 @@ #include "Scraper.h" //#endif + namespace ADDON { @@ -51,7 +50,6 @@ namespace ADDON * CAddonMgr * */ -int cp_to_clog(cp_log_severity_t lvl); CAddonMgr* CAddonMgr::m_pInstance = NULL; std::map<TYPE, IAddonMgrCallback*> CAddonMgr::m_managers; @@ -62,8 +60,6 @@ CAddonMgr::CAddonMgr() CAddonMgr::~CAddonMgr() { - if(m_cpluff) - m_cpluff->destroy(); } CAddonMgr* CAddonMgr::Get() @@ -71,49 +67,10 @@ CAddonMgr* CAddonMgr::Get() if (!m_pInstance) { m_pInstance = new CAddonMgr(); - m_pInstance->OnInit(); } return m_pInstance; } -void CAddonMgr::OnInit() -{ - m_cpluff = new DllLibCPluff; - m_cpluff->Load(); - - if (!m_cpluff->IsLoaded()) - assert(false); - - cp_status_t status; - setlocale(LC_ALL, ""); //FIXME where should this be handled? - cp_log_severity_t log; - if (g_advancedSettings.m_logLevel >= LOG_LEVEL_DEBUG_SAMBA) - log = CP_LOG_DEBUG; - else if (g_advancedSettings.m_logLevel >= LOG_LEVEL_DEBUG) - log = CP_LOG_INFO; - else - log = CP_LOG_WARNING; - - m_cpluff->set_fatal_error_handler(cp_fatalErrorHandler); - status = m_cpluff->init(); - if (status != CP_OK) - { - CLog::Log(LOGERROR, "ADDONS: Fatal Error, cp_init() returned status: %i", status); - assert(false); - } - - //TODO could separate addons into different contexts - // would allow partial unloading of addon framework - m_cp_context = m_cpluff->create_context(&status); - assert(m_cp_context); - - status = m_cpluff->register_pcollection(m_cp_context, "/home/alasdair/code/git-xbmc/addons"); - assert(status == CP_OK); - status = m_cpluff->register_logger(m_cp_context, cp_logger, &CAddonMgr::m_pInstance, CP_LOG_INFO); - assert(status == CP_OK); - status = m_cpluff->scan_plugins(m_cp_context, 0); -} - IAddonMgrCallback* CAddonMgr::GetCallbackForType(TYPE type) { if (m_managers.find(type) == m_managers.end()) @@ -140,8 +97,14 @@ void CAddonMgr::UnregisterAddonMgrCallback(TYPE type) bool CAddonMgr::HasAddons(const TYPE &type, const CONTENT_TYPE &content/*= CONTENT_NONE*/, bool enabledOnly/*= true*/) { - if (type == ADDON_VFSDLL) - return true; + if (m_addons.empty()) + { + VECADDONS add; + GetAllAddons(add,false); + } + + if (content == CONTENT_NONE) + return (m_addons.find(type) != m_addons.end()); VECADDONS addons; return GetAddons(type, addons, content, enabledOnly); @@ -174,7 +137,18 @@ void CAddonMgr::OnFileComplete(TICKET aTicket, CStdString& aFilePath, INT aByteR bool CAddonMgr::GetAllAddons(VECADDONS &addons, bool enabledOnly/*= true*/) { - return true; + VECADDONS temp; + if (CAddonMgr::Get()->GetAddons(ADDON_PLUGIN, temp, CONTENT_NONE, enabledOnly)) + addons.insert(addons.end(), temp.begin(), temp.end()); + if (CAddonMgr::Get()->GetAddons(ADDON_SCRAPER, temp, CONTENT_NONE, enabledOnly)) + addons.insert(addons.end(), temp.begin(), temp.end()); + if 
(CAddonMgr::Get()->GetAddons(ADDON_SCREENSAVER, temp, CONTENT_NONE, enabledOnly)) + addons.insert(addons.end(), temp.begin(), temp.end()); + if (CAddonMgr::Get()->GetAddons(ADDON_SCRIPT, temp, CONTENT_NONE, enabledOnly)) + addons.insert(addons.end(), temp.begin(), temp.end()); + if (CAddonMgr::Get()->GetAddons(ADDON_VIZ, temp, CONTENT_NONE, enabledOnly)) + addons.insert(addons.end(), temp.begin(), temp.end()); + return !addons.empty(); } bool CAddonMgr::GetAddons(const TYPE &type, VECADDONS &addons, const CONTENT_TYPE &content/*= CONTENT_NONE*/, bool enabledOnly/*= true*/) @@ -185,12 +159,26 @@ bool CAddonMgr::GetAddons(const TYPE &type, VECADDONS &addons, const CONTENT_TYP if(!m_lastDirScan.IsValid() || (m_lastDirScan + span) < CDateTime::GetCurrentDateTime()) { m_lastDirScan = CDateTime::GetCurrentDateTime(); - cp_status_t status = m_cpluff->scan_plugins(m_cp_context, 0); - if (status != CP_OK) - CLog::Log(LOGERROR, "ADDON: CPluff scan_plugins() failed"); + LoadAddonsXML(); } - return GetExtensions(type, addons, content); + addons.clear(); + if (m_addons.find(type) != m_addons.end()) + { + IVECADDONS itr = m_addons[type].begin(); + while (itr != m_addons[type].end()) + { // filter out what we're not looking for + if ((enabledOnly && (*itr)->Disabled()) + || (content != CONTENT_NONE && !(*itr)->Supports(content))) + { + ++itr; + continue; + } + addons.push_back(*itr); + ++itr; + } + } + return !addons.empty(); } bool CAddonMgr::GetAddon(const CStdString &str, AddonPtr &addon, const TYPE &type/*=ADDON_UNKNOWN*/, bool enabledOnly/*= true*/) @@ -293,6 +281,9 @@ bool CAddonMgr::LoadAddonsXML() if (!LoadAddonsXML(props)) return false; + // refresh addon dirs if neccesary/forced + FindAddons(); + // now enable accordingly VECADDONPROPS::const_iterator itr = props.begin(); while (itr != props.end()) @@ -319,6 +310,470 @@ bool CAddonMgr::LoadAddonsXML() return true; } +void CAddonMgr::FindAddons() +{ + // parse the user & system dirs for addons of the requested type + CFileItemList items; + if (!CSpecialProtocol::XBMCIsHome()) + CDirectory::GetDirectory("special://home/addons", items); + CDirectory::GetDirectory("special://xbmc/addons", items); + + // store any addons with unresolved deps, then recheck at the end + VECADDONS unresolved; + + // for all folders found + for (int i = 0; i < items.Size(); ++i) + { + CFileItemPtr item = items[i]; + + if(!item->m_bIsFolder) + continue; + + // read description.xml and populate the addon + AddonPtr addon; + if (!AddonFromInfoXML(item->m_strPath, addon)) + continue; + + // refuse to store addons with missing library + CStdString library(CUtil::AddFileToFolder(addon->Path(), addon->LibName())); + if (!CFile::Exists(library)) + { + CLog::Log(LOGDEBUG, "ADDON: Missing library file %s, bypassing package", library.c_str()); + continue; + } + + // check for/cache icon thumbnail + //TODO cache one thumb per addon id instead + CFileItem item2(CUtil::AddFileToFolder(addon->Path(), addon->LibName()), false); + item2.SetCachedProgramThumb(); + if (!item2.HasThumbnail()) + item2.SetUserProgramThumb(); + if (!item2.HasThumbnail()) + item2.SetThumbnailImage(addon->Icon()); + if (item2.HasThumbnail()) + { + XFILE::CFile::Cache(item2.GetThumbnailImage(),item->GetCachedProgramThumb()); + } + + if (!DependenciesMet(addon)) + { + unresolved.push_back(addon); + continue; + } + else + { // everything ok, add to available addons if new + if (UpdateIfKnown(addon)) + continue; + else + { + m_addons[addon->Type()].push_back(addon); + m_idMap.insert(std::make_pair(addon->ID(), 
addon)); + } + } + } + + for (unsigned i = 0; i < unresolved.size(); i++) + { + AddonPtr& addon = unresolved[i]; + if (DependenciesMet(addon)) + { + if (!UpdateIfKnown(addon)) + { + m_addons[addon->Type()].push_back(addon); + m_idMap.insert(std::make_pair(addon->ID(), addon)); + } + } + } +// CLog::Log(LOGINFO, "ADDON: Found %"PRIuS" addons", m_addons.find(type) == m_addons.end() ? 0: m_addons[type].size(), TranslateType(type).c_str()); +} + +bool CAddonMgr::UpdateIfKnown(AddonPtr &addon) +{ + if (m_addons.find(addon->Type()) != m_addons.end()) + { + for (unsigned i = 0; i < m_addons[addon->Type()].size(); i++) + { + if (m_addons[addon->Type()][i]->ID() == addon->ID()) + { + //TODO inform any manager first, and request removal + //TODO choose most recent version if varying + m_addons[addon->Type()][i] = addon; + CStdString id = addon->ID(); + m_idMap.erase(id); + m_idMap.insert(std::make_pair(addon->ID(), addon)); + return true; + } + } + } + return false; +} + +bool CAddonMgr::DependenciesMet(AddonPtr &addon) +{ + // As remote repos are not functioning, + // this will fail if a dependency is not found locally + if (!addon) + return false; + + ADDONDEPS deps = addon->GetDeps(); + ADDONDEPS::iterator itr = deps.begin(); + while (itr != deps.end()) + { + CStdString id; + id = (*itr).first; + AddonVersion min = (*itr).second.first; + AddonVersion max = (*itr).second.second; + if (m_idMap.count(id)) + { + AddonPtr dep = m_idMap[id]; + // we're guaranteed to have at least max OR min here + if (!min.str.IsEmpty() && !max.str.IsEmpty()) + return (dep->Version() >= min && dep->Version() <= max); + else if (!min.str.IsEmpty()) + return (dep->Version() >= min); + else + return (dep->Version() <= max); + } + for (unsigned i=0; i < m_remoteAddons.size(); i++) + { + if (m_remoteAddons[i].id == id) + { + if(m_remoteAddons[i].version >= min && m_remoteAddons[i].version <= max) + { + //TODO line up download + return false; + } + } + } + itr++; + } + return deps.empty(); +} + +bool CAddonMgr::AddonFromInfoXML(const CStdString &path, AddonPtr &addon) +{ + // First check that we can load description.xml + CStdString strPath(CUtil::AddFileToFolder(path, ADDON_METAFILE)); + if(!CFile::Exists(strPath)) + return false; + + TiXmlDocument xmlDoc; + if (!xmlDoc.LoadFile(strPath)) + { + CLog::Log(LOGERROR, "Unable to load: %s, Line %d\n%s", strPath.c_str(), xmlDoc.ErrorRow(), xmlDoc.ErrorDesc()); + return false; + } + + TiXmlElement *element = xmlDoc.RootElement(); + if (!element || strcmpi(element->Value(), "addoninfo") != 0) + { + CLog::Log(LOGERROR, "ADDON: Error loading %s: cannot find <addon> root element", strPath.c_str()); + return false; + } + + /* Steps required to meet package requirements + * 1. id exists and is valid + * 2. type exists and is valid + * 3. version exists + * 4. a license is specified + * 5. operating system matches ours + * 6. summary exists + * 7. for scrapers & plugins, support at least one type of content + * + * NOTE: addon dependencies are handled in ::FindAddons() + */ + + /* Validate id */ + CStdString id; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("id"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <id> element, ignoring", strPath.c_str()); + return false; + } + id = element->GetText(); + //FIXME since we no longer required uuids, should we bother validating anything? 
+ if (id.IsEmpty()) + { + CLog::Log(LOGERROR, "ADDON: %s has invalid <id> element, ignoring", strPath.c_str()); + return false; + } + + /* Validate type */ + TYPE type; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("type"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <id> element, ignoring", strPath.c_str()); + return false; + } + type = TranslateType(element->GetText()); + if (type == ADDON_UNKNOWN) + { + CLog::Log(LOGERROR, "ADDON: %s has invalid type identifier: '%d'", strPath.c_str(), type); + return false; + } + + /* Retrieve Name */ + CStdString name; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("title"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <title> element, ignoring", strPath.c_str()); + return false; + } + name = element->GetText(); + + /* Retrieve version */ + CStdString version; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("version"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <version> element, ignoring", strPath.c_str()); + return false; + } + /* Validate version */ + version = element->GetText(); + CRegExp versionRE; + versionRE.RegComp(ADDON_VERSION_RE.c_str()); + if (versionRE.RegFind(version.c_str()) != 0) + { + CLog::Log(LOGERROR, "ADDON: %s has invalid <version> element, ignoring", strPath.c_str()); + return false; + } + + /* Path, ID & Version are valid */ + AddonProps addonProps(id, type, version); + addonProps.name = name; + addonProps.path = path; + addonProps.icon = CUtil::AddFileToFolder(path, "default.tbn"); + + /* Retrieve license */ + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("license"); +/* if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <license> element, ignoring", strPath.c_str()); + return false; + } + addonProps.license = element->GetText();*/ + + /* Retrieve platforms which this addon supports */ + CStdString platform; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("platforms")->FirstChildElement("platform"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <platforms> element, ignoring", strPath.c_str()); + return false; + } + + bool all(false); + std::set<CStdString> platforms; + do + { + CStdString platform = element->GetText(); + if (platform == "all") + { + all = true; + break; + } + platforms.insert(platform); + element = element->NextSiblingElement("platform"); + } while (element != NULL); + + if (!all) + { +#if defined(_LINUX) && !defined(__APPLE__) + if (!platforms.count("linux")) + { + CLog::Log(LOGNOTICE, "ADDON: %s is not supported under Linux, ignoring", strPath.c_str()); + return false; + } +#elif defined(_WIN32) + if (!platforms.count("windows")) + { + CLog::Log(LOGNOTICE, "ADDON: %s is not supported under Windows, ignoring", strPath.c_str()); + return false; + } +#elif defined(__APPLE__) + if (!platforms.count("osx")) + { + CLog::Log(LOGNOTICE, "ADDON: %s is not supported under OSX, ignoring", strPath.c_str()); + return false; + } +#elif defined(_XBOX) + if (!platforms.count("xbox")) + { + CLog::Log(LOGNOTICE, "ADDON: %s is not supported under XBOX, ignoring", strPath.c_str()); + return false; + } +#endif + } + + /* Retrieve summary */ + CStdString summary; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("summary"); + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <summary> element, ignoring", strPath.c_str()); + return false; + } + addonProps.summary = element->GetText(); + + if (addonProps.type == 
ADDON_SCRAPER || addonProps.type == ADDON_PLUGIN) + { + /* Retrieve content types that this addon supports */ + CStdString platform; + element = NULL; + if (xmlDoc.RootElement()->FirstChildElement("supportedcontent")) + { + element = xmlDoc.RootElement()->FirstChildElement("supportedcontent")->FirstChildElement("content"); + } + if (!element) + { + CLog::Log(LOGERROR, "ADDON: %s missing <supportedcontent> element, ignoring", strPath.c_str()); + return false; + } + + std::set<CONTENT_TYPE> contents; + do + { + CONTENT_TYPE content = TranslateContent(element->GetText()); + if (content != CONTENT_NONE) + { + contents.insert(content); + } + element = element->NextSiblingElement("content"); + } while (element != NULL); + + if (contents.empty()) + { + CLog::Log(LOGERROR, "ADDON: %s %s supports no available content-types, ignoring", TranslateType(addonProps.type).c_str(), addonProps.name.c_str()); + return false; + } + else + { + addonProps.contents = contents; + } + } + + /*** Beginning of optional fields ***/ + /* Retrieve description */ + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("description"); + if (element) + addonProps.description = element->GetText(); + + /* Retrieve author */ + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("author"); + if (element) + addonProps.author = element->GetText(); + + /* Retrieve disclaimer */ + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("disclaimer"); + if (element) + addonProps.disclaimer = element->GetText(); + + /* Retrieve library file name */ + // will be replaced with default library name if unspecified + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("library"); + if (element) + addonProps.libname = element->GetText(); + + //TODO move this to addon specific class, if it's needed at all.. +#ifdef _WIN32 + /* Retrieve WIN32 library file name in case it is present + * This is required for no overwrite to the fixed WIN32 add-on's + * during compile time + */ + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("librarywin32"); + if (element) // If it is found overwrite standard library name + addonProps.libname = element->GetText(); +#endif + + /* Retrieve dependencies that this addon requires */ + std::map<CStdString, std::pair<const AddonVersion, const AddonVersion> > deps; + element = NULL; + element = xmlDoc.RootElement()->FirstChildElement("dependencies"); + if (element) + { + element = element->FirstChildElement("dependency"); + if (!element) + CLog::Log(LOGDEBUG, "ADDON: %s missing at least one <dependency> element, will ignore this dependency", strPath.c_str()); + else + { + do + { + CStdString min = element->Attribute("minversion"); + CStdString max = element->Attribute("maxversion"); + CStdString id = element->GetText(); + if (!id || (!min && ! 
max)) + { + CLog::Log(LOGDEBUG, "ADDON: %s malformed <dependency> element, will ignore this dependency", strPath.c_str()); + continue; + } + deps.insert(std::make_pair(id, std::make_pair(AddonVersion(min), AddonVersion(max)))); + element = element->NextSiblingElement("dependency"); + } while (element != NULL); + addonProps.dependencies = deps; + } + } + + /*** end of optional fields ***/ + + /* Create an addon object and store in a shared_ptr */ + addon.reset(); + switch (type) + { + case ADDON_PLUGIN: + case ADDON_SCRIPT: + { + AddonPtr temp(new CAddon(addonProps)); + addon = temp; + break; + } + case ADDON_SCRAPER: + { + AddonPtr temp(new CScraper(addonProps)); + addon = temp; + break; + } + case ADDON_VIZ: + { + AddonPtr temp(new CVisualisation(addonProps)); + addon = temp; + break; + } + case ADDON_SCREENSAVER: + { + AddonPtr temp(new CScreenSaver(addonProps)); + addon = temp; + break; + } + case ADDON_SCRAPER_LIBRARY: + case ADDON_VIZ_LIBRARY: + { + AddonPtr temp(new CAddonLibrary(addonProps)); + addon = temp; + break; + } + default: + return false; + } + + return true; +} + CStdString CAddonMgr::GetAddonsXMLFile() const { CStdString folder; @@ -450,72 +905,5 @@ bool CAddonMgr::GetAddon(const TYPE &type, const TiXmlNode *node, VECADDONPROPS return false; } -/* - * libcpluff interaction - */ - -void CAddonMgr::CPluffFatalError(const char *msg) -{ - CLog::Log(LOGERROR, "ADDONS: CPluffFatalError(%s)", msg); -} - -int cp_to_clog(cp_log_severity_t lvl) -{ - if( lvl == CP_LOG_DEBUG ) - return 0; - else if (lvl == CP_LOG_INFO) - return 1; - else if (lvl == CP_LOG_WARNING) - return 3; - else - return 4; -} -cp_log_severity_t clog_to_cp(int lvl) -{ - if (lvl >= 4) - return CP_LOG_ERROR; - else if (lvl == 3) - return CP_LOG_WARNING; - else if (lvl >= 1) - return CP_LOG_INFO; - else - return CP_LOG_DEBUG; -} - - -void CAddonMgr::CPluffLog(cp_log_severity_t level, const char *msg, const char *apid, void *user_data) -{ - if(!apid) - CLog::Log(LOGDEBUG, "ADDON: '%s'", msg); - else - CLog::Log(LOGDEBUG, "ADDON: '%s' reports '%s'", apid, msg); -} - -bool CAddonMgr::GetExtensions(const TYPE &type, VECADDONS &addons, const CONTENT_TYPE &content) -{ - cp_status_t status; - int num; - CStdString ext_point(TranslateType(type)); - cp_extension_t **exts = m_cpluff->get_extensions_info(m_cp_context, ext_point.c_str(), &status, &num); - for(int i=0; i <num; i++) - { - CStdString id(exts[i]->plugin->identifier); - CStdString version(exts[i]->plugin->version); - AddonProps props(id, type, version); - props.name = CStdString(exts[i]->name); - props.summary = CStdString(exts[i]->plugin->summary); - props.path = CStdString(exts[i]->plugin->plugin_path); - props.icon = props.path + "/default.tbn"; //TODO store icons per ID - addons.push_back(AddonFactory(type, props)); - } - m_cpluff->release_info(m_cp_context, exts); - return addons.size(); -} - -AddonPtr AddonFactory(const AddonProps &props) -{ - return AddonPtr(new T(props)); -} - } /* namespace ADDON */ diff --git a/xbmc/addons/AddonManager.h b/xbmc/addons/AddonManager.h index 91d5af5ec7..5ddda3b006 100644 --- a/xbmc/addons/AddonManager.h +++ b/xbmc/addons/AddonManager.h @@ -28,13 +28,6 @@ #include <vector> #include <map> -// libcpluff -class DllLibCPluff; -extern "C" -{ -#include "../lib/cpluff-0.1.3/libcpluff/cpluff.h" -} - namespace ADDON { typedef std::vector<AddonPtr> VECADDONS; @@ -98,22 +91,12 @@ namespace ADDON void UpdateRepos(); bool ParseRepoXML(const CStdString &path); + void FindAddons(); bool LoadAddonsXML(); bool SaveAddonsXML(); - - // libcpluff 
- bool GetExtensions(const TYPE &type, VECADDONS &addons, const CONTENT_TYPE &content); - void CPluffFatalError(const char *msg); - void CPluffLog(cp_log_severity_t level, const char *msg, const char *apid, void *user_data); - cp_context_t *m_cp_context; - DllLibCPluff *m_cpluff; - // libcpluff callbacks - static void cp_fatalErrorHandler(const char *msg) { - CAddonMgr::Get()->CPluffFatalError(msg); - } - static void cp_logger(cp_log_severity_t level, const char *msg, const char *apid, void *user_data) { - CAddonMgr::Get()->CPluffLog(level, msg, apid, user_data); - } + bool AddonFromInfoXML(const CStdString &path, AddonPtr &addon); + bool DependenciesMet(AddonPtr &addon); + bool UpdateIfKnown(AddonPtr &addon); /* addons.xml */ CStdString GetAddonsXMLFile() const; @@ -124,7 +107,6 @@ namespace ADDON bool GetAddon(const TYPE &type, const TiXmlNode *node, VECADDONPROPS &addon); CAddonMgr(); - void OnInit(); static CAddonMgr* m_pInstance; static std::map<TYPE, IAddonMgrCallback*> m_managers; MAPADDONS m_addons; diff --git a/xbmc/addons/IAddon.h b/xbmc/addons/IAddon.h index 0145e813f5..c1b65f5eeb 100644 --- a/xbmc/addons/IAddon.h +++ b/xbmc/addons/IAddon.h @@ -53,7 +53,6 @@ namespace ADDON ADDON_SCRAPER, ADDON_SCREENSAVER, ADDON_PLUGIN, - ADDON_VFSDLL, ADDON_VIZ_LIBRARY, // add noninstallable after this and installable before ADDON_SCRAPER_LIBRARY } TYPE; diff --git a/xbmc/lib/cpluff-0.1.3/config.h b/xbmc/lib/cpluff-0.1.3/config.h deleted file mode 100644 index 27dea4f08f..0000000000 --- a/xbmc/lib/cpluff-0.1.3/config.h +++ /dev/null @@ -1,118 +0,0 @@ -/* config.h. Generated from config.h.in by configure. */ -/* config.h.in. Generated from configure.ac by autoheader. */ - -/* The earliest ABI compatible version or undefined */ -#define CP_ABI_COMPATIBILITY "0.1" - -/* File name separator character */ -#define CP_FNAMESEP_CHAR '/' - -/* File name separator string */ -#define CP_FNAMESEP_STR "/" - -/* Shared library extension */ -#define CP_SHREXT ".so" - -/* Multi-threading support type */ -#define CP_THREADS "Posix" - -/* Define to use GNU Libtool libltdl */ -/* #undef DLOPEN_LIBTOOL */ - -/* Define to use Posix dlopen */ -#define DLOPEN_POSIX /**/ - -/* Define to 1 if translation of program messages to the user's native - language is requested. */ -#define ENABLE_NLS 1 - -/* Define to 1 if you have the MacOS X function CFLocaleCopyCurrent in the - CoreFoundation framework. */ -/* #undef HAVE_CFLOCALECOPYCURRENT */ - -/* Define to 1 if you have the MacOS X function CFPreferencesCopyAppValue in - the CoreFoundation framework. */ -/* #undef HAVE_CFPREFERENCESCOPYAPPVALUE */ - -/* Define if the GNU dcgettext() function is already present or preinstalled. - */ -#define HAVE_DCGETTEXT 1 - -/* Define to 1 if you have the <dlfcn.h> header file. */ -#define HAVE_DLFCN_H 1 - -/* Define if the GNU gettext() function is already present or preinstalled. */ -#define HAVE_GETTEXT 1 - -/* Define if you have the iconv() function. */ -/* #undef HAVE_ICONV */ - -/* Define to 1 if you have the <inttypes.h> header file. */ -#define HAVE_INTTYPES_H 1 - -/* Define to 1 if you have the `lstat' function. */ -#define HAVE_LSTAT 1 - -/* Define to 1 if you have the <memory.h> header file. */ -#define HAVE_MEMORY_H 1 - -/* Define to 1 if you have the `stat' function. */ -#define HAVE_STAT 1 - -/* Define to 1 if you have the <stdint.h> header file. */ -#define HAVE_STDINT_H 1 - -/* Define to 1 if you have the <stdlib.h> header file. */ -#define HAVE_STDLIB_H 1 - -/* Define to 1 if you have the <strings.h> header file. 
*/ -#define HAVE_STRINGS_H 1 - -/* Define to 1 if you have the <string.h> header file. */ -#define HAVE_STRING_H 1 - -/* Define to 1 if you have the <sys/stat.h> header file. */ -#define HAVE_SYS_STAT_H 1 - -/* Define to 1 if you have the <sys/types.h> header file. */ -#define HAVE_SYS_TYPES_H 1 - -/* Define to 1 if you have the <unistd.h> header file. */ -#define HAVE_UNISTD_H 1 - -/* Define to the sub-directory in which libtool stores uninstalled libraries. - */ -#define LT_OBJDIR ".libs/" - -/* Define to 1 if your C compiler doesn't accept -c and -o together. */ -/* #undef NO_MINUS_C_MINUS_O */ - -/* Name of package */ -#define PACKAGE "cpluff" - -/* Define to the address where bug reports for this package should be sent. */ -#define PACKAGE_BUGREPORT "johannes.lehtinen@iki.fi" - -/* Define to the full name of this package. */ -#define PACKAGE_NAME "C-Pluff" - -/* Define to the full name and version of this package. */ -#define PACKAGE_STRING "C-Pluff 0.1.3" - -/* Define to the one symbol short name of this package. */ -#define PACKAGE_TARNAME "cpluff" - -/* Define to the home page for this package. */ -#define PACKAGE_URL "" - -/* Define to the version of this package. */ -#define PACKAGE_VERSION "0.1.3" - -/* Define to 1 if you have the ANSI C header files. */ -#define STDC_HEADERS 1 - -/* Version number of package */ -#define VERSION "0.1.3" - -/* Define to empty if `const' does not conform to ANSI C. */ -/* #undef const */ diff --git a/xbmc/lib/cpluff-0.1.3/examples/cpfile/cpfile b/xbmc/lib/cpluff-0.1.3/examples/cpfile/cpfile deleted file mode 100644 index 2470f21b9f..0000000000 --- a/xbmc/lib/cpluff-0.1.3/examples/cpfile/cpfile +++ /dev/null @@ -1,9 +0,0 @@ -#! /bin/sh - -# Copyright 2007 Johannes Lehtinen -# This script is free software; Johannes Lehtinen gives unlimited -# permission to copy, distribute and modify it. - -prefix="/usr/local" -exec_prefix="${prefix}" -exec "${exec_prefix}/bin/cpluff-loader" -c "${exec_prefix}/lib/cpfile/plugins" -s org.c-pluff.examples.cpfile.core "$@" diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/cpluff.h b/xbmc/lib/cpluff-0.1.3/libcpluff/cpluff.h deleted file mode 100644 index 24d280c0e0..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/cpluff.h +++ /dev/null @@ -1,1504 +0,0 @@ -/*------------------------------------------------------------------------- - * C-Pluff, a plug-in framework for C - * Copyright 2007 Johannes Lehtinen - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- *-----------------------------------------------------------------------*/ - -/** @file - * C-Pluff C API header file. - * The elements declared here constitute the C-Pluff C API. To use the - * API include this file and link the main program and plug-in runtime - * libraries with the C-Pluff C library. In addition to local declarations, - * this file also includes cpluffdef.h header file for defines common to C - * and C++ API. - */ - -#ifndef CPLUFF_H_ -#define CPLUFF_H_ - -/** - * @defgroup cDefines Defines - * Preprocessor defines. - */ - -#include "cpluffdef.h" - -#ifdef __cplusplus -extern "C" { -#endif /*__cplusplus*/ - - -/* ------------------------------------------------------------------------ - * Defines - * ----------------------------------------------------------------------*/ - -/** - * @def CP_C_API - * @ingroup cDefines - * - * Marks a symbol declaration to be part of the C-Pluff C API. - * This macro declares the symbol to be imported from the C-Pluff library. - */ - -#ifndef CP_C_API -#define CP_C_API CP_IMPORT -#endif - - -/** - * @defgroup cScanFlags Flags for plug-in scanning - * @ingroup cDefines - * - * These constants can be orred together for the flags - * parameter of ::cp_scan_plugins. - */ -/*@{*/ - -/** - * This flag enables upgrades of installed plug-ins by unloading - * the old version and installing the new version. - */ -#define CP_SP_UPGRADE 0x01 - -/** - * This flag causes all plug-ins to be stopped before any - * plug-ins are to be upgraded. - */ -#define CP_SP_STOP_ALL_ON_UPGRADE 0x02 - -/** - * This flag causes all plug-ins to be stopped before any - * plugins are to be installed (also if new version is to be installed - * as part of an upgrade). - */ -#define CP_SP_STOP_ALL_ON_INSTALL 0x04 - -/** - * Setting this flag causes the currently active plug-ins to be restarted - * after all changes to the plug-ins have been made (if they were stopped). - */ -#define CP_SP_RESTART_ACTIVE 0x08 - -/*@}*/ - - -/* ------------------------------------------------------------------------ - * Data types - * ----------------------------------------------------------------------*/ - -/** - * @defgroup cEnums Enumerations - * Constant value enumerations. - */ - -/** - * @defgroup cTypedefs Typedefs - * Typedefs of various kind. - */ - -/** - * @defgroup cStructs Data structures - * Data structure definitions. - */ - - -/* Enumerations */ - -/** - * @ingroup cEnums - * - * An enumeration of status codes returned by API functions. - * Most of the interface functions return a status code. The returned - * status code either indicates successful completion of the operation - * or some specific kind of error. Some functions do not return a status - * code because they never fail. - */ -enum cp_status_t { - - /** - * Operation was performed successfully (equals to zero). - * @showinitializer - */ - CP_OK = 0, - - /** Not enough memory or other operating system resources available */ - CP_ERR_RESOURCE, - - /** The specified object is unknown to the framework */ - CP_ERR_UNKNOWN, - - /** An I/O error occurred */ - CP_ERR_IO, - - /** Malformed plug-in descriptor was encountered when loading a plug-in */ - CP_ERR_MALFORMED, - - /** Plug-in or symbol conflicts with another plug-in or symbol. */ - CP_ERR_CONFLICT, - - /** Plug-in dependencies could not be satisfied. */ - CP_ERR_DEPENDENCY, - - /** Plug-in runtime signaled an error. */ - CP_ERR_RUNTIME - -}; - -/** - * @ingroup cEnums - * An enumeration of possible plug-in states. 
Plug-in states are controlled - * by @ref cFuncsPlugin "plug-in management functions". Plug-in states can be - * observed by @ref cp_register_plistener "registering" a - * @ref cp_plugin_listener_func_t "plug-in listener function" - * or by calling ::cp_get_plugin_state. - * - * @sa cp_plugin_listener_t - * @sa cp_get_plugin_state - */ -enum cp_plugin_state_t { - - /** - * Plug-in is not installed. No plug-in information has been - * loaded. - */ - CP_PLUGIN_UNINSTALLED, - - /** - * Plug-in is installed. At this stage the plug-in information has - * been loaded but its dependencies to other plug-ins has not yet - * been resolved. The plug-in runtime has not been loaded yet. - * The extension points and extensions provided by the plug-in - * have been registered. - */ - CP_PLUGIN_INSTALLED, - - /** - * Plug-in dependencies have been resolved. At this stage it has - * been verified that the dependencies of the plug-in are satisfied - * and the plug-in runtime has been loaded but it is not active - * (it has not been started or it has been stopped). - * Plug-in is resolved when a dependent plug-in is being - * resolved or before the plug-in is started. Plug-in is put - * back to installed stage if its dependencies are being - * uninstalled. - */ - CP_PLUGIN_RESOLVED, - - /** - * Plug-in is starting. The plug-in has been resolved and the start - * function (if any) of the plug-in runtime is about to be called. - * A plug-in is started when explicitly requested by the main - * program or when a dependent plug-in is about to be started or when - * a dynamic symbol defined by the plug-in is being resolved. This state - * is omitted and the state changes directly from resolved to active - * if the plug-in runtime does not define a start function. - */ - CP_PLUGIN_STARTING, - - /** - * Plug-in is stopping. The stop function (if any) of the plug-in - * runtime is about to be called. A plug-in is stopped if the start - * function fails or when stopping is explicitly - * requested by the main program or when its dependencies are being - * stopped. This state is omitted and the state changes directly from - * active to resolved if the plug-in runtime does not define a stop - * function. - */ - CP_PLUGIN_STOPPING, - - /** - * Plug-in has been successfully started and it has not yet been - * stopped. - */ - CP_PLUGIN_ACTIVE - -}; - -/** - * @ingroup cEnums - * An enumeration of possible message severities for framework logging. These - * constants are used when passing a log message to a - * @ref cp_logger_func_t "logger function" and when - * @ref cp_register_logger "registering" a logger function. - */ -enum cp_log_severity_t { - - /** Used for detailed debug messages */ - CP_LOG_DEBUG, - - /** Used for informational messages such as plug-in state changes */ - CP_LOG_INFO, - - /** Used for messages warning about possible problems */ - CP_LOG_WARNING, - - /** Used for messages reporting errors */ - CP_LOG_ERROR - -}; - -/*@}*/ - - -/* Typedefs */ - -/** - * @defgroup cTypedefsOpaque Opaque types - * @ingroup cTypedefs - * Opaque data type definitions. - */ -/*@{*/ - -/** - * A plug-in context represents the co-operation environment of a set of - * plug-ins from the perspective of a particular participating plug-in or - * the perspective of the main program. It is used as an opaque handle to - * the shared resources but the framework also uses the context to identify - * the plug-in or the main program invoking framework functions. 
Therefore - * a plug-in should not generally expose its context instance to other - * plug-ins or the main program and neither should the main program - * expose its context instance to plug-ins. The main program creates - * plug-in contexts using ::cp_create_context and plug-ins receive their - * plug-in contexts via @ref cp_plugin_runtime_t::create. - */ -typedef struct cp_context_t cp_context_t; - -/*@}*/ - - /** - * @defgroup cTypedefsShorthand Shorthand type names - * @ingroup cTypedefs - * Shorthand type names for structs and enumerations. - */ -/*@{*/ - -/** A type for cp_plugin_info_t structure. */ -typedef struct cp_plugin_info_t cp_plugin_info_t; - -/** A type for cp_plugin_import_t structure. */ -typedef struct cp_plugin_import_t cp_plugin_import_t; - -/** A type for cp_ext_point_t structure. */ -typedef struct cp_ext_point_t cp_ext_point_t; - -/** A type for cp_extension_t structure. */ -typedef struct cp_extension_t cp_extension_t; - -/** A type for cp_cfg_element_t structure. */ -typedef struct cp_cfg_element_t cp_cfg_element_t; - -/** A type for cp_plugin_runtime_t structure. */ -typedef struct cp_plugin_runtime_t cp_plugin_runtime_t; - -/** A type for cp_status_t enumeration. */ -typedef enum cp_status_t cp_status_t; - -/** A type for cp_plugin_state_t enumeration. */ -typedef enum cp_plugin_state_t cp_plugin_state_t; - -/** A type for cp_log_severity_t enumeration. */ -typedef enum cp_log_severity_t cp_log_severity_t; - -/*@}*/ - -/** - * @defgroup cTypedefsFuncs Callback function types - * @ingroup cTypedefs - * Typedefs for client supplied callback functions. - */ -/*@{*/ - -/** - * A listener function called synchronously after a plugin state change. - * The function should return promptly. - * @ref cFuncsInit "Library initialization", - * @ref cFuncsContext "plug-in context management", - * @ref cFuncsPlugin "plug-in management", - * listener registration (::cp_register_plistener and ::cp_unregister_plistener) - * and @ref cFuncsSymbols "dynamic symbol" functions must not be called from - * within a plug-in listener invocation. Listener functions are registered - * using ::cp_register_plistener. - * - * @param plugin_id the plug-in identifier - * @param old_state the old plug-in state - * @param new_state the new plug-in state - * @param user_data the user data pointer supplied at listener registration - */ -typedef void (*cp_plugin_listener_func_t)(const char *plugin_id, cp_plugin_state_t old_state, cp_plugin_state_t new_state, void *user_data); - -/** - * A logger function called to log selected plug-in framework messages. The - * messages may be localized. Plug-in framework API functions must not - * be called from within a logger function invocation. In a multi-threaded - * environment logger function invocations are serialized by the framework. - * Logger functions are registered using ::cp_register_logger. - * - * @param severity the severity of the message - * @param msg the message to be logged, possibly localized - * @param apid the identifier of the activating plug-in or NULL for the main program - * @param user_data the user data pointer given when the logger was registered - */ -typedef void (*cp_logger_func_t)(cp_log_severity_t severity, const char *msg, const char *apid, void *user_data); - -/** - * A fatal error handler for handling unrecoverable errors. If the error - * handler returns then the framework aborts the program. Plug-in framework - * API functions must not be called from within a fatal error handler - * invocation. 
The fatal error handler function is set using - * ::cp_set_fatal_error_handler. - * - * @param msg the possibly localized error message - */ -typedef void (*cp_fatal_error_func_t)(const char *msg); - -/** - * A run function registered by a plug-in to perform work. - * The run function should perform a finite chunk of work and it should - * return a non-zero value if there is more work to be done. Run functions - * are registered using ::cp_run_function and the usage is discussed in - * more detail in the @ref cFuncsPluginExec "serial execution" section. - * - * @param plugin_data the plug-in instance data pointer - * @return non-zero if there is more work to be done or zero if finished - */ -typedef int (*cp_run_func_t)(void *plugin_data); - -/*@}*/ - - -/* Data structures */ - -/** - * @ingroup cStructs - * Plug-in information structure captures information about a plug-in. This - * information can be loaded from a plug-in descriptor using - * ::cp_load_plugin_descriptor. Information about installed plug-ins can - * be obtained using ::cp_get_plugin_info and ::cp_get_plugins_info. This - * structure corresponds to the @a plugin element in a plug-in descriptor. - */ -struct cp_plugin_info_t { - - /** - * The obligatory unique identifier of the plugin. A recommended way - * to generate identifiers is to use domain name service (DNS) prefixes - * (for example, org.cpluff.ExamplePlugin) to avoid naming conflicts. This - * corresponds to the @a id attribute of the @a plugin element in a plug-in - * descriptor. - */ - char *identifier; - - /** - * A mandatory summary of the addon's features. - * This corresponds to the @a summary attribute of the @a plugin element in - * a plug-in descriptor. - */ - char *summary; - - /** - * An optional plug-in name. NULL if not available. The plug-in name is - * intended only for display purposes and the value can be localized. - * This corresponds to the @a name attribute of the @a plugin element in - * a plug-in descriptor. - */ - char *name; - - /** - * An optional release version string. NULL if not available. This - * corresponds to the @a version attribute of the @a plugin element in - * a plug-in descriptor. - */ - char *version; - - /** - * An optional provider name. NULL if not available. This is the name of - * the author or the organization providing the plug-in. The - * provider name is intended only for display purposes and the value can - * be localized. This corresponds to the @a provider-name attribute of the - * @a plugin element in a plug-in descriptor. - */ - char *provider_name; - - /** - * Path of the plugin directory or NULL if not known. This is the - * (absolute or relative) path to the plug-in directory containing - * plug-in data and the plug-in runtime library. The value corresponds - * to the path specified to ::cp_load_plugin_descriptor when loading - * the plug-in. - */ - char *plugin_path; - - /** - * Optional ABI compatibility information. NULL if not available. - * This is the earliest version of the plug-in interface the current - * interface is backwards compatible with when it comes to the application - * binary interface (ABI) of the plug-in. That is, plug-in clients compiled against - * any plug-in interface version from @a abi_bw_compatibility to - * @ref version (inclusive) can use the current version of the plug-in - * binary. This describes binary or runtime compatibility. - * The value corresponds to the @a abi-compatibility - * attribute of the @a backwards-compatibility element in a plug-in descriptor. 
- */ - char *abi_bw_compatibility; - - /** - * Optional API compatibility information. NULL if not available. - * This is the earliest version of the plug-in interface the current - * interface is backwards compatible with when it comes to the - * application programming interface (API) of the plug-in. That is, - * plug-in clients written for any plug-in interface version from - * @a api_bw_compatibility to @ref version (inclusive) can be compiled - * against the current version of the plug-in API. This describes - * source or build time compatibility. The value corresponds to the - * @a api-compatibility attribute of the @a backwards-compatibility - * element in a plug-in descriptor. - */ - char *api_bw_compatibility; - - /** - * Optional C-Pluff version requirement. NULL if not available. - * This is the version of the C-Pluff implementation the plug-in was - * compiled against. It is used to determine the compatibility of - * the plug-in runtime and the linked in C-Pluff implementation. Any - * C-Pluff version that is backwards compatible on binary level with the - * specified version fulfills the requirement. - */ - char *req_cpluff_version; - - /** Number of import entries in the @ref imports array. */ - unsigned int num_imports; - - /** - * An array of @ref num_imports import entries. These correspond to - * @a import elements in a plug-in descriptor. - */ - cp_plugin_import_t *imports; - - /** - * The base name of the plug-in runtime library, or NULL if none. - * A platform specific prefix (for example, "lib") and an extension - * (for example, ".dll" or ".so") may be added to the base name. - * This corresponds to the @a library attribute of the - * @a runtime element in a plug-in descriptor. - */ - char *runtime_lib_name; - - /** - * The symbol pointing to the plug-in runtime function information or - * NULL if none. The symbol with this name should point to an instance of - * @ref cp_plugin_runtime_t structure. This corresponds to the - * @a funcs attribute of the @a runtime element in a plug-in descriptor. - */ - char *runtime_funcs_symbol; - - /** Number of extension points in @ref ext_points array. */ - unsigned int num_ext_points; - - /** - * An array of @ref num_ext_points extension points provided by this - * plug-in. These correspond to @a extension-point elements in a - * plug-in descriptor. - */ - cp_ext_point_t *ext_points; - - /** Number of extensions in @ref extensions array. */ - unsigned int num_extensions; - - /** - * An array of @ref num_extensions extensions provided by this - * plug-in. These correspond to @a extension elements in a plug-in - * descriptor. - */ - cp_extension_t *extensions; - -}; - -/** - * @ingroup cStructs - * Information about plug-in import. Plug-in import structures are - * contained in @ref cp_plugin_info_t::imports. - */ -struct cp_plugin_import_t { - - /** - * The identifier of the imported plug-in. This corresponds to the - * @a plugin attribute of the @a import element in a plug-in descriptor. - */ - char *plugin_id; - - /** - * An optional version requirement. NULL if no version requirement. - * This is the version of the imported plug-in the importing plug-in was - * compiled against. Any version of the imported plug-in that is - * backwards compatible with this version fulfills the requirement. - * This corresponds to the @a if-version attribute of the @a import - * element in a plug-in descriptor. - */ - char *version; - - /** - * Is this import optional. 1 for optional and 0 for mandatory import. 
- * An optional import causes the imported plug-in to be started if it is - * available but does not stop the importing plug-in from starting if the - * imported plug-in is not available. If the imported plug-in is available - * but the API version conflicts with the API version requirement then the - * importing plug-in fails to start. This corresponds to the @a optional - * attribute of the @a import element in a plug-in descriptor. - */ - int optional; -}; - -/** - * @ingroup cStructs - * Extension point structure captures information about an extension - * point. Extension point structures are contained in - * @ref cp_plugin_info_t::ext_points. - */ -struct cp_ext_point_t { - - /** - * A pointer to plug-in information containing this extension point. - * This reverse pointer is provided to make it easy to get information - * about the plug-in which is hosting a particular extension point. - */ - cp_plugin_info_t *plugin; - - /** - * The local identifier uniquely identifying the extension point within the - * host plug-in. This corresponds to the @name id attribute of an - * @a extension-point element in a plug-in descriptor. - */ - char *local_id; - - /** - * The unique identifier of the extension point. This is automatically - * constructed by concatenating the identifier of the host plug-in and - * the local identifier of the extension point. - */ - char *identifier; - - /** - * An optional extension point name. NULL if not available. The extension - * point name is intended for display purposes only and the value can be - * localized. This corresponds to the @a name attribute of - * an @a extension-point element in a plug-in descriptor. - */ - char *name; - - /** - * An optional path to the extension schema definition. - * NULL if not available. The path is relative to the plug-in directory. - * This corresponds to the @a schema attribute - * of an @a extension-point element in a plug-in descriptor. - */ - char *schema_path; -}; - -/** - * @ingroup cStructs - * Extension structure captures information about an extension. Extension - * structures are contained in @ref cp_plugin_info_t::extensions. - */ -struct cp_extension_t { - - /** - * A pointer to plug-in information containing this extension. - * This reverse pointer is provided to make it easy to get information - * about the plug-in which is hosting a particular extension. - */ - cp_plugin_info_t *plugin; - - /** - * The unique identifier of the extension point this extension is - * attached to. This corresponds to the @a point attribute of an - * @a extension element in a plug-in descriptor. - */ - char *ext_point_id; - - /** - * An optional local identifier uniquely identifying the extension within - * the host plug-in. NULL if not available. This corresponds to the - * @a id attribute of an @a extension element in a plug-in descriptor. - */ - char *local_id; - - /** - * An optional unique identifier of the extension. NULL if not available. - * This is automatically constructed by concatenating the identifier - * of the host plug-in and the local identifier of the extension. - */ - char *identifier; - - /** - * An optional extension name. NULL if not available. The extension name - * is intended for display purposes only and the value can be localized. - * This corresponds to the @a name attribute - * of an @a extension element in a plug-in descriptor. - **/ - char *name; - - /** - * Extension configuration starting with the extension element. 
- * This includes extension configuration information as a tree of - * configuration elements. These correspond to the @a extension - * element and its contents in a plug-in descriptor. - */ - cp_cfg_element_t *configuration; -}; - -/** - * @ingroup cStructs - * A configuration element contains configuration information for an - * extension. Utility functions ::cp_lookup_cfg_element and - * ::cp_lookup_cfg_value can be used for traversing the tree of - * configuration elements. Pointer to the root configuration element is - * stored at @ref cp_extension_t::configuration and others are contained as - * @ref cp_cfg_element_t::children "children" of parent elements. - */ -struct cp_cfg_element_t { - - /** - * The name of the configuration element. This corresponds to the name of - * the element in a plug-in descriptor. - */ - char *name; - - /** Number of attribute name, value pairs in the @ref atts array. */ - unsigned int num_atts; - - /** - * An array of pointers to alternating attribute names and values. - * Attribute values can be localized. - */ - char **atts; - - /** - * An optional value of this configuration element. NULL if not available. - * The value can be localized. This corresponds to the - * text contents of the element in a plug-in descriptor. - */ - char *value; - - /** A pointer to the parent element or NULL if this is a root element. */ - cp_cfg_element_t *parent; - - /** The index of this element among its siblings (0-based). */ - unsigned int index; - - /** Number of children in the @ref children array. */ - unsigned int num_children; - - /** - * An array of @ref num_children childrens of this element. These - * correspond to child elements in a plug-in descriptor. - */ - cp_cfg_element_t *children; -}; - -/** - * @ingroup cStructs - * Container for plug-in runtime information. A plug-in runtime defines a - * static instance of this structure to pass information to the plug-in - * framework. The plug-in framework then uses the information - * to create and control plug-in instances. The symbol pointing - * to the runtime information instance is named by the @a funcs - * attribute of the @a runtime element in a plug-in descriptor. - * - * The following graph displays how these functions are used to control the - * state of the plug-in instance. - * - * @dot - * digraph lifecycle { - * rankdir=LR; - * node [shape=ellipse, fontname=Helvetica, fontsize=10]; - * edge [fontname=Helvetica, fontsize=10]; - * none [label="no instance"]; - * inactive [label="inactive"]; - * active [label="active"]; - * none -> inactive [label="create", URL="\ref create"]; - * inactive -> active [label="start", URL="\ref start"]; - * active -> inactive [label="stop", URL="\ref stop"]; - * inactive -> none [label="destroy", URL="\ref destroy"]; - * } - * @enddot - */ -struct cp_plugin_runtime_t { - - /** - * An initialization function called to create a new plug-in - * runtime instance. The initialization function initializes and - * returns an opaque plug-in instance data pointer which is then - * passed on to other control functions. This data pointer should - * be used to access plug-in instance specific data. For example, - * the context reference must be stored as part of plug-in instance - * data if the plug-in runtime needs it. On failure, the function - * must return NULL. - * - * C-pluff API functions must not be called from within a create - * function invocation and symbols from imported plug-ins must not be - * used because they may not available yet. 
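- *
- * As an illustration, a minimal create function sketch (the instance data
- * structure and the function name below are hypothetical, not part of the
- * API):
- * @code
- * #include <stdlib.h>
- *
- * struct inst_data { cp_context_t *ctx; };
- *
- * static void *my_create(cp_context_t *ctx) {
- *     struct inst_data *data = malloc(sizeof(struct inst_data));
- *     if (data != NULL) {
- *         data->ctx = ctx;  // keep the context for the other runtime functions
- *     }
- *     return data;          // NULL tells the framework that creation failed
- * }
- * @endcode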
- * - * @param ctx the plug-in context of the new plug-in instance - * @return an opaque pointer to plug-in instance data or NULL on failure - */ - void *(*create)(cp_context_t *ctx); - - /** - * A start function called to start a plug-in instance. - * The start function must return zero (CP_OK) on success and non-zero - * on failure. If the start fails then the stop function (if any) is - * called to clean up plug-in state. @ref cFuncsInit "Library initialization", - * @ref cFuncsContext "plug-in context management" and - * @ref cFuncsPlugin "plug-in management" functions must not be - * called from within a start function invocation. The start function - * pointer can be NULL if the plug-in runtime does not have a start - * function. - * - * The start function implementation should set up plug-in and return - * promptly. If there is further work to be done then a plug-in can - * start a thread or register a run function using ::cp_run_function. - * Symbols from imported plug-ins are guaranteed to be available for - * the start function. - * - * @param data an opaque pointer to plug-in instance data - * @return non-zero on success, or zero on failure - */ - int (*start)(void *data); - - /** - * A stop function called to stop a plugin instance. - * This function must cease all plug-in runtime activities. - * @ref cFuncsInit "Library initialization", - * @ref cFuncsContext "plug-in context management", - * @ref cFuncsPlugin "plug-in management" - * functions, ::cp_run_function and ::cp_resolve_symbol must not be called - * from within a stop function invocation. The stop function pointer can - * be NULL if the plug-in runtime does not have a stop function. - * It is guaranteed that no run functions registered by the plug-in are - * called simultaneously or after the call to the stop function. - * - * The stop function should release any external resources hold by - * the plug-in. Dynamically resolved symbols are automatically released - * and dynamically defined symbols and registered run functions are - * automatically unregistered after the call to stop function. - * Resolved external symbols are still available for the stop function - * and symbols provided by the plug-in should remain available - * after the call to stop function (although functionality might be - * limited). Final cleanup can be safely done in the destroy function. - * - * @param data an opaque pointer to plug-in instance data - */ - void (*stop)(void *data); - - /** - * A destroy function called to destroy a plug-in instance. - * This function should release any plug-in instance data. - * The plug-in is stopped before this function is called. - * C-Pluff API functions must not be called from within a destroy - * function invocation and symbols from imported plug-ins must not be - * used because they may not be available anymore. Correspondingly, - * it is guaranteed that the symbols provided by the plug-in are not - * used by other plug-ins when destroy function has been called. - * - * @param data an opaque pointer to plug-in instance data - */ - void (*destroy)(void *data); - -}; - -/*@}*/ - - -/* ------------------------------------------------------------------------ - * Function declarations - * ----------------------------------------------------------------------*/ - -/** - * @defgroup cFuncs Functions - * - * C API functions. The C-Pluff C API functions and - * any data exposed by them are generally thread-safe if the library has been - * compiled with multi-threading support. 
The - * @ref cFuncsInit "framework initialization functions" - * are exceptions, they are not thread-safe. - */ - -/** - * @defgroup cFuncsFrameworkInfo Framework information - * @ingroup cFuncs - * - * These functions can be used to query runtime information about the - * linked in C-Pluff implementation. They may be used by the main program or - * by a plug-in runtime. - */ -/*@{*/ - -/** - * Returns the release version string of the linked in C-Pluff - * implementation. - * - * @return the C-Pluff release version string - */ -CP_C_API const char *cp_get_version(void) CP_GCC_PURE; - -/** - * Returns the canonical host type associated with the linked in C-Pluff implementation. - * A multi-platform installation manager could use this information to - * determine what plug-in versions to install. - * - * @return the canonical host type - */ -CP_C_API const char *cp_get_host_type(void) CP_GCC_PURE; - -/*@}*/ - - -/** - * @defgroup cFuncsInit Framework initialization - * @ingroup cFuncs - * - * These functions are used for framework initialization. - * They are intended to be used by the main program. These functions are - * not thread safe. - */ -/*@{*/ - -/** - * Sets the fatal error handler called on non-recoverable errors. The default - * error handler prints the error message out to standard error and aborts - * the program. If the user specified error handler returns then the framework - * will abort the program. Setting NULL error handler will restore the default - * handler. This function is not thread-safe and it should be called - * before initializing the framework to catch all fatal errors. - * - * @param error_handler the fatal error handler - */ -CP_C_API void cp_set_fatal_error_handler(cp_fatal_error_func_t error_handler); - -/** - * Initializes the plug-in framework. This function must be called - * by the main program before calling any other plug-in framework - * functions except @ref cFuncsFrameworkInfo "framework information" functions and - * ::cp_set_fatal_error_handler. This function may be - * called several times but it is not thread-safe. Library resources - * should be released by calling ::cp_destroy when the framework is - * not needed anymore. - * - * Additionally, to enable localization support, the main program should - * set the current locale using @code setlocale(LC_ALL, "") @endcode - * before calling this function. - * - * @return @ref CP_OK (zero) on success or error code on failure - */ -CP_C_API cp_status_t cp_init(void); - -/** - * Destroys the plug-in framework and releases the resources used by it. - * The plug-in framework is only destroyed after this function has - * been called as many times as ::cp_init. This function is not - * thread-safe. Plug-in framework functions other than ::cp_init, - * ::cp_get_framework_info and ::cp_set_fatal_error_handler - * must not be called after the plug-in framework has been destroyed. - * All contexts are destroyed and all data references returned by the - * framework become invalid. - */ -CP_C_API void cp_destroy(void); - -/*@}*/ - - -/** - * @defgroup cFuncsContext Plug-in context initialization - * @ingroup cFuncs - * - * These functions are used to manage plug-in contexts from the main - * program perspective. They are not intended to be used by a plug-in runtime. - * From the main program perspective a plug-in context is a container for - * installed plug-ins. There can be several plug-in context instances if there - * are several independent sets of plug-ins. 
However, different plug-in - * contexts are not very isolated from each other in practice because the - * global symbols exported by a plug-in runtime in one context are visible to - * all plug-ins in all context instances. - */ -/*@{*/ - -/** - * Creates a new plug-in context which can be used as a container for plug-ins. - * Plug-ins are loaded and installed into a specific context. The main - * program may have more than one plug-in context but the plug-ins that - * interact with each other should be placed in the same context. The - * resources associated with the context are released by calling - * ::cp_destroy_context when the context is not needed anymore. Remaining - * contexts are automatically destroyed when the plug-in framework is - * destroyed. - * - * @param status pointer to the location where status code is to be stored, or NULL - * @return the newly created plugin context, or NULL on failure - */ -CP_C_API cp_context_t * cp_create_context(cp_status_t *status); - -/** - * Destroys the specified plug-in context and releases the associated resources. - * Stops and uninstalls all plug-ins in the context. The context must not be - * accessed after calling this function. - * - * @param ctx the context to be destroyed - */ -CP_C_API void cp_destroy_context(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/** - * Registers a plug-in collection with a plug-in context. A plug-in collection - * is a directory that has plug-ins as its immediate subdirectories. The - * plug-in context will scan the directory when ::cp_scan_plugins is called. - * Returns @ref CP_OK if the directory has already been registered. A plug-in - * collection can be unregistered using ::cp_unregister_pcollection or - * ::cp_unregister_pcollections. - * - * @param ctx the plug-in context - * @param dir the directory - * @return @ref CP_OK (zero) on success or @ref CP_ERR_RESOURCE if insufficient memory - */ -CP_C_API cp_status_t cp_register_pcollection(cp_context_t *ctx, const char *dir) CP_GCC_NONNULL(1, 2); - -/** - * Unregisters a previously registered plug-in collection from a - * plug-in context. Plug-ins already loaded from the collection are not - * affected. Does nothing if the directory has not been registered. - * Plug-in collections can be registered using ::cp_register_pcollection. - * - * @param ctx the plug-in context - * @param dir the previously registered directory - */ -CP_C_API void cp_unregister_pcollection(cp_context_t *ctx, const char *dir) CP_GCC_NONNULL(1, 2); - -/** - * Unregisters all plug-in collections from a plug-in context. - * Plug-ins already loaded are not affected. Plug-in collections can - * be registered using ::cp_register_pcollection. - * - * @param ctx the plug-in context - */ -CP_C_API void cp_unregister_pcollections(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/*@}*/ - - -/** - * @defgroup cFuncsLogging Logging - * @ingroup cFuncs - * - * These functions can be used to receive and emit log messages related - * to a particular plug-in context. They can be used by the main program - * or by a plug-in runtime. - */ -/*@{*/ - -/** - * Registers a logger with a plug-in context or updates the settings of a - * registered logger. The logger will receive selected log messages. - * If the specified logger is not yet known, a new logger registration - * is made, otherwise the settings for the existing logger are updated. 
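- *
- * As an illustration, a minimal logger sketch (the function name and the
- * chosen minimum severity below are arbitrary examples):
- * @code
- * #include <stdio.h>
- *
- * static void my_logger(cp_log_severity_t severity, const char *msg,
- *         const char *apid, void *user_data) {
- *     fprintf(stderr, "cp[%d]: %s (%s)\n", (int) severity, msg,
- *         apid != NULL ? apid : "main program");
- * }
- *
- * // after the plug-in context has been created:
- * cp_register_logger(ctx, my_logger, NULL, CP_LOG_WARNING);
- * @endcode
- *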
- * The logger can be unregistered using ::cp_unregister_logger and it is - * automatically unregistered when the registering plug-in is stopped or - * when the context is destroyed. - * - * @param ctx the plug-in context to log - * @param logger the logger function to be called - * @param user_data the user data pointer passed to the logger - * @param min_severity the minimum severity of messages passed to logger - * @return @ref CP_OK (zero) on success or @ref CP_ERR_RESOURCE if insufficient memory - */ -CP_C_API cp_status_t cp_register_logger(cp_context_t *ctx, cp_logger_func_t logger, void *user_data, cp_log_severity_t min_severity) CP_GCC_NONNULL(1, 2); - -/** - * Removes a logger registration. - * - * @param ctx the plug-in context - * @param logger the logger function to be unregistered - */ -CP_C_API void cp_unregister_logger(cp_context_t *ctx, cp_logger_func_t logger) CP_GCC_NONNULL(1, 2); - -/** - * Emits a new log message. - * - * @param ctx the plug-in context - * @param severity the severity of the event - * @param msg the log message (possibly localized) - */ -CP_C_API void cp_log(cp_context_t *ctx, cp_log_severity_t severity, const char *msg) CP_GCC_NONNULL(1, 3); - -/** - * Returns whether a message of the specified severity would get logged. - * - * @param ctx the plug-in context - * @param severity the target logging severity - * @return whether a message of the specified severity would get logged - */ -CP_C_API int cp_is_logged(cp_context_t *ctx, cp_log_severity_t severity) CP_GCC_NONNULL(1); - -/*@}*/ - - -/** - * @defgroup cFuncsPlugin Plug-in management - * @ingroup cFuncs - * - * These functions can be used to manage plug-ins. They are intended to be - * used by the main program. - */ -/*@{*/ - -/** - * Loads a plug-in descriptor from the specified plug-in installation - * path and returns information about the plug-in. The plug-in descriptor - * is validated during loading. Possible loading errors are reported via the - * specified plug-in context. The plug-in is not installed to the context. - * If operation fails or the descriptor - * is invalid then NULL is returned. The caller must release the returned - * information by calling ::cp_release_plugin_info when it does not - * need the information anymore, typically after installing the plug-in. - * The returned plug-in information must not be modified. - * - * @param ctx the plug-in context - * @param path the installation path of the plug-in - * @param status a pointer to the location where status code is to be stored, or NULL - * @return pointer to the information structure or NULL if error occurs - */ -CP_C_API cp_plugin_info_t * cp_load_plugin_descriptor(cp_context_t *ctx, const char *path, cp_status_t *status) CP_GCC_NONNULL(1, 2); - -/** - * Installs the plug-in described by the specified plug-in information - * structure to the specified plug-in context. The plug-in information - * must have been loaded using ::cp_load_plugin_descriptor with the same - * plug-in context. - * The installation fails on #CP_ERR_CONFLICT if the context already - * has an installed plug-in with the same plug-in identifier. Installation - * also fails if the plug-in tries to install an extension point which - * conflicts with an already installed extension point. - * The plug-in information must not be modified but it is safe to call - * ::cp_release_plugin_info after the plug-in has been installed. 
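- *
- * As an illustration, a typical load-and-install sketch (the plug-in path
- * below is hypothetical, error handling is abbreviated, and the descriptor
- * information is released here with ::cp_release_info as documented later
- * in this header):
- * @code
- * cp_status_t status;
- * cp_plugin_info_t *pi = cp_load_plugin_descriptor(ctx, "/path/to/plugins/example", &status);
- * if (pi != NULL) {
- *     status = cp_install_plugin(ctx, pi);
- *     cp_release_info(ctx, pi);  // the descriptor information is no longer needed
- * }
- * @endcode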
- * - * @param ctx the plug-in context - * @param pi plug-in information structure - * @return @ref CP_OK (zero) on success or an error code on failure - */ -CP_C_API cp_status_t cp_install_plugin(cp_context_t *ctx, cp_plugin_info_t *pi) CP_GCC_NONNULL(1, 2); - -/** - * Scans for plug-ins in the registered plug-in directories, installing - * new plug-ins and upgrading installed plug-ins. This function can be used to - * initially load the plug-ins and to later rescan for new plug-ins. - * - * When several versions of the same plug-in is available the most recent - * version will be installed. The upgrade behavior depends on the specified - * @ref cScanFlags "flags". If #CP_SP_UPGRADE is set then upgrades to installed plug-ins are - * allowed. The old version is unloaded and the new version installed instead. - * If #CP_SP_STOP_ALL_ON_UPGRADE is set then all active plug-ins are stopped - * if any plug-ins are to be upgraded. If #CP_SP_STOP_ALL_ON_INSTALL is set then - * all active plug-ins are stopped if any plug-ins are to be installed or - * upgraded. Finally, if #CP_SP_RESTART_ACTIVE is set all currently active - * plug-ins will be restarted after the changes (if they were stopped). - * - * When removing plug-in files from the plug-in directories, the - * plug-ins to be removed must be first unloaded. Therefore this function - * does not check for removed plug-ins. - * - * @param ctx the plug-in context - * @param flags the bitmask of flags - * @return @ref CP_OK (zero) on success or an error code on failure - */ -CP_C_API cp_status_t cp_scan_plugins(cp_context_t *ctx, int flags) CP_GCC_NONNULL(1); - -/** - * Starts a plug-in. Also starts any imported plug-ins. If the plug-in is - * already starting then - * this function blocks until the plug-in has started or failed to start. - * If the plug-in is already active then this function returns immediately. - * If the plug-in is stopping then this function blocks until the plug-in - * has stopped and then starts the plug-in. - * - * @param ctx the plug-in context - * @param id identifier of the plug-in to be started - * @return @ref CP_OK (zero) on success or an error code on failure - */ -CP_C_API cp_status_t cp_start_plugin(cp_context_t *ctx, const char *id) CP_GCC_NONNULL(1, 2); - -/** - * Stops a plug-in. First stops any dependent plug-ins that are currently - * active. Then stops the specified plug-in. If the plug-in is already - * stopping then this function blocks until the plug-in has stopped. If the - * plug-in is already stopped then this function returns immediately. If the - * plug-in is starting then this function blocks until the plug-in has - * started (or failed to start) and then stops the plug-in. - * - * @param ctx the plug-in context - * @param id identifier of the plug-in to be stopped - * @return @ref CP_OK (zero) on success or @ref CP_ERR_UNKNOWN if unknown plug-in - */ -CP_C_API cp_status_t cp_stop_plugin(cp_context_t *ctx, const char *id) CP_GCC_NONNULL(1, 2); - -/** - * Stops all active plug-ins. - * - * @param ctx the plug-in context - */ -CP_C_API void cp_stop_plugins(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/** - * Uninstalls the specified plug-in. The plug-in is first stopped if it is active. - * Then uninstalls the plug-in and any dependent plug-ins. 
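- *
- * As an illustration (the plug-in identifier below is hypothetical):
- * @code
- * if (cp_uninstall_plugin(ctx, "org.example.obsolete") == CP_ERR_UNKNOWN) {
- *     // no plug-in with this identifier is installed
- * }
- * @endcode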
- * - * @param ctx the plug-in context - * @param id identifier of the plug-in to be unloaded - * @return @ref CP_OK (zero) on success or @ref CP_ERR_UNKNOWN if unknown plug-in - */ -CP_C_API cp_status_t cp_uninstall_plugin(cp_context_t *ctx, const char *id) CP_GCC_NONNULL(1, 2); - -/** - * Uninstalls all plug-ins. All plug-ins are first stopped and then - * uninstalled. - * - * @param ctx the plug-in context - */ -CP_C_API void cp_uninstall_plugins(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/*@}*/ - - -/** - * @defgroup cFuncsPluginInfo Plug-in and extension information - * @ingroup cFuncs - * - * These functions can be used to query information about the installed - * plug-ins, extension points and extensions or to listen for plug-in state - * changes. They may be used by the main program or by a plug-in runtime. - */ -/*@{*/ - -/** - * Returns static information about the specified plug-in. The returned - * information must not be modified and the caller must - * release the information by calling ::cp_release_info when the - * information is not needed anymore. When a plug-in runtime calls this - * function it may pass NULL as the identifier to get information about the - * plug-in itself. - * - * @param ctx the plug-in context - * @param id identifier of the plug-in to be examined or NULL for self - * @param status a pointer to the location where status code is to be stored, or NULL - * @return pointer to the information structure or NULL on failure - */ -CP_C_API cp_plugin_info_t * cp_get_plugin_info(cp_context_t *ctx, const char *id, cp_status_t *status) CP_GCC_NONNULL(1); - -/** - * Returns static information about the installed plug-ins. The returned - * information must not be modified and the caller must - * release the information by calling ::cp_release_info when the - * information is not needed anymore. - * - * @param ctx the plug-in context - * @param status a pointer to the location where status code is to be stored, or NULL - * @param num a pointer to the location where the number of returned plug-ins is stored, or NULL - * @return pointer to a NULL-terminated list of pointers to plug-in information - * or NULL on failure - */ -CP_C_API cp_plugin_info_t ** cp_get_plugins_info(cp_context_t *ctx, cp_status_t *status, int *num) CP_GCC_NONNULL(1); - -/** - * Returns static information about the currently installed extension points. - * The returned information must not be modified and the caller must - * release the information by calling ::cp_release_info when the - * information is not needed anymore. - * - * @param ctx the plug-in context - * @param status a pointer to the location where status code is to be stored, or NULL - * @param num filled with the number of returned extension points, if non-NULL - * @return pointer to a NULL-terminated list of pointers to extension point - * information or NULL on failure - */ -CP_C_API cp_ext_point_t ** cp_get_ext_points_info(cp_context_t *ctx, cp_status_t *status, int *num) CP_GCC_NONNULL(1); - -/** - * Returns static information about the currently installed extension points. - * The returned information must not be modified and the caller must - * release the information by calling ::cp_release_info when the - * information is not needed anymore. 
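- *
- * As an illustration, iterating over the extensions attached to a single
- * extension point (the extension point identifier below is hypothetical):
- * @code
- * cp_status_t status;
- * int i, n;
- * cp_extension_t **exts = cp_get_extensions_info(ctx, "org.example.host.menu", &status, &n);
- * if (exts != NULL) {
- *     for (i = 0; exts[i] != NULL; i++) {
- *         // examine exts[i]->name, exts[i]->configuration, ...
- *     }
- *     cp_release_info(ctx, exts);
- * }
- * @endcode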
- * - * @param ctx the plug-in context - * @param extpt_id the extension point identifier or NULL for all extensions - * @param status a pointer to the location where status code is to be stored, or NULL - * @param num a pointer to the location where the number of returned extension points is to be stored, or NULL - * @return pointer to a NULL-terminated list of pointers to extension - * information or NULL on failure - */ -CP_C_API cp_extension_t ** cp_get_extensions_info(cp_context_t *ctx, const char *extpt_id, cp_status_t *status, int *num) CP_GCC_NONNULL(1); - -/** - * Releases a previously obtained reference counted information object. The - * documentation for functions returning such information refers - * to this function. The information must not be accessed after it has - * been released. The framework uses reference counting to deallocate - * the information when it is not in use anymore. - * - * @param ctx the plug-in context - * @param info the information to be released - */ -CP_C_API void cp_release_info(cp_context_t *ctx, void *info) CP_GCC_NONNULL(1, 2); - -/** - * Returns the current state of the specified plug-in. Returns - * #CP_PLUGIN_UNINSTALLED if the specified plug-in identifier is unknown. - * - * @param ctx the plug-in context - * @param id the plug-in identifier - * @return the current state of the plug-in - */ -CP_C_API cp_plugin_state_t cp_get_plugin_state(cp_context_t *ctx, const char *id) CP_GCC_NONNULL(1, 2); - -/** - * Registers a plug-in listener with a plug-in context. The listener is called - * synchronously immediately after a plug-in state change. There can be several - * listeners registered with the same context. A plug-in listener can be - * unregistered using ::cp_unregister_plistener and it is automatically - * unregistered when the registering plug-in is stopped or when the context - * is destroyed. - * - * @param ctx the plug-in context - * @param listener the plug-in listener to be added - * @param user_data user data pointer supplied to the listener - * @return @ref CP_OK (zero) on success or @ref CP_ERR_RESOURCE if out of resources - */ -CP_C_API cp_status_t cp_register_plistener(cp_context_t *ctx, cp_plugin_listener_func_t listener, void *user_data) CP_GCC_NONNULL(1, 2); - -/** - * Removes a plug-in listener from a plug-in context. Does nothing if the - * specified listener was not registered. - * - * @param ctx the plug-in context - * @param listener the plug-in listener to be removed - */ -CP_C_API void cp_unregister_plistener(cp_context_t *ctx, cp_plugin_listener_func_t listener) CP_GCC_NONNULL(1, 2); - -/** - * Traverses a configuration element tree and returns the specified element. - * The target element is specified by a base element and a relative path from - * the base element to the target element. The path includes element names - * separated by slash '/'. Two dots ".." can be used to designate a parent - * element. Returns NULL if the specified element does not exist. If there are - * several subelements with the same name, this function chooses the first one - * when traversing the tree. - * - * @param base the base configuration element - * @param path the path to the target element - * @return the target element or NULL if nonexisting - */ -CP_C_API cp_cfg_element_t * cp_lookup_cfg_element(cp_cfg_element_t *base, const char *path) CP_GCC_PURE CP_GCC_NONNULL(1, 2); - -/** - * Traverses a configuration element tree and returns the value of the - * specified element or attribute. 
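- *
- * As an illustration (the element and attribute names below are
- * hypothetical; the path syntax is explained in the rest of this
- * description):
- * @code
- * // given a cp_extension_t *ext attached to the extension point:
- * char *label = cp_lookup_cfg_value(ext->configuration, "item@label");
- * @endcode
- *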
The target element or attribute is specified - * by a base element and a relative path from the base element to the target - * element or attributes. The path includes element names - * separated by slash '/'. Two dots ".." can be used to designate a parent - * element. The path may end with '@' followed by an attribute name - * to select an attribute. Returns NULL if the specified element or attribute - * does not exist or does not have a value. If there are several subelements - * with the same name, this function chooses the first one when traversing the - * tree. - * - * @param base the base configuration element - * @param path the path to the target element - * @return the value of the target element or attribute or NULL - */ -CP_C_API char * cp_lookup_cfg_value(cp_cfg_element_t *base, const char *path) CP_GCC_PURE CP_GCC_NONNULL(1, 2); - -/*@}*/ - - -/** - * @defgroup cFuncsPluginExec Plug-in execution - * @ingroup cFuncs - * - * These functions support a plug-in controlled execution model. Started plug-ins can - * use ::cp_run_function to register @ref cp_run_func_t "a run function" which is called when the - * main program calls ::cp_run_plugins or ::cp_run_plugins_step. A run - * function should do a finite chunk of work and then return telling whether - * there is more work to be done. A run function is automatically unregistered - * when the plug-in is stopped. Run functions make it possible for plug-ins - * to control the flow of execution or they can be used as a coarse - * way of task switching if there is no multi-threading support. - * - * The C-Pluff distribution includes a generic main program, cpluff-loader, - * which only acts as a plug-in loader. It loads and starts up the - * specified plug-ins, passing any additional startup arguments to them and - * then just calls run functions of the plug-ins. This - * makes it is possible to put all the application specific logic in - * plug-ins. Application does not necessarily need a main program of its own. - * - * It is also safe, from framework perspective, to call these functions from - * multiple threads. Run functions may then be executed in parallel threads. - */ -/*@{*/ - -/** - * Registers a new run function. The plug-in instance data pointer is given to - * the run function as a parameter. The run function must return zero if it has - * finished its work or non-zero if it should be called again later. The run - * function is unregistered when it returns zero. Plug-in framework functions - * stopping the registering plug-in must not be called from within a run - * function. This function does nothing if the specified run - * function is already registered for the calling plug-in instance. - * - * @param ctx the plug-in context of the registering plug-in - * @param runfunc the run function to be registered - * @return @ref CP_OK (zero) on success or an error code on failure - */ -CP_C_API cp_status_t cp_run_function(cp_context_t *ctx, cp_run_func_t runfunc) CP_GCC_NONNULL(1, 2); - -/** - * Runs the started plug-ins as long as there is something to run. - * This function calls repeatedly run functions registered by started plug-ins - * until there are no more active run functions. This function is normally - * called by a thin main proram, a loader, which loads plug-ins, starts some - * plug-ins and then passes control over to the started plug-ins. 
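- *
- * As an illustration, a minimal loader-style sketch (the plug-in identifier
- * below is hypothetical and error handling is omitted):
- * @code
- * cp_start_plugin(ctx, "org.example.app");
- * cp_run_plugins(ctx);  // returns once no active run functions remain
- * @endcode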
- * - * @param ctx the plug-in context containing the plug-ins - */ -CP_C_API void cp_run_plugins(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/** - * Runs one registered run function. This function calls one - * active run function registered by a started plug-in. When the run function - * returns this function also returns and passes control back to the main - * program. The return value can be used to determine whether there are any - * active run functions left. This function does nothing if there are no active - * registered run functions. - * - * @param ctx the plug-in context containing the plug-ins - * @return whether there are active run functions waiting to be run - */ -CP_C_API int cp_run_plugins_step(cp_context_t *ctx) CP_GCC_NONNULL(1); - -/** - * Sets startup arguments for the specified plug-in context. Like for usual - * C main functions, the first argument is expected to be the name of the - * program being executed or an empty string and the argument array should be - * terminated by NULL entry. If the main program is - * about to pass startup arguments to plug-ins it should call this function - * before starting any plug-ins in the context. The arguments are not copied - * and the caller is responsible for keeping the argument data available once - * the arguments have been set until the context is destroyed. Plug-ins can - * access the startup arguments using ::cp_get_context_args. - * - * @param ctx the plug-in context - * @param argv a NULL-terminated array of arguments - */ -CP_C_API void cp_set_context_args(cp_context_t *ctx, char **argv) CP_GCC_NONNULL(1, 2); - -/** - * Returns the startup arguments associated with the specified - * plug-in context. This function is intended to be used by a plug-in runtime. - * Startup arguments are set by the main program using ::cp_set_context_args. - * The returned argument count is zero and the array pointer is NULL if no - * arguments have been set. - * - * @param ctx the plug-in context - * @param argc a pointer to a location where the number of startup arguments is stored, or NULL for none - * @return an argument array terminated by NULL or NULL if not set - */ -CP_C_API char **cp_get_context_args(cp_context_t *ctx, int *argc) CP_GCC_NONNULL(1); - -/*@}*/ - - -/** - * @defgroup cFuncsSymbols Dynamic symbols - * @ingroup cFuncs - * - * These functions can be used to dynamically access symbols exported by the - * plug-ins. They are intended to be used by a plug-in runtime or by the main - * program. - */ -/*@{*/ - -/** - * Defines a context specific symbol. If a plug-in has symbols which have - * a plug-in instance specific value then the plug-in should define those - * symbols when it is started. The defined symbols are cleared - * automatically when the plug-in instance is stopped. Symbols can not be - * redefined. - * - * @param ctx the plug-in context - * @param name the name of the symbol - * @param ptr pointer value for the symbol - * @return @ref CP_OK (zero) on success or a status code on failure - */ -CP_C_API cp_status_t cp_define_symbol(cp_context_t *ctx, const char *name, void *ptr) CP_GCC_NONNULL(1, 2, 3); - -/** - * Resolves a symbol provided by the specified plug-in. The plug-in is started - * automatically if it is not already active. The symbol may be context - * specific or global. The framework first looks for a context specific - * symbol and then falls back to resolving a global symbol exported by the - * plug-in runtime library. 
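- *
- * As an illustration (the plug-in identifier and symbol name below are
- * hypothetical):
- * @code
- * cp_status_t status;
- * const char *greeting = cp_resolve_symbol(ctx, "org.example.provider", "example_greeting", &status);
- * if (greeting != NULL) {
- *     // use the pointer here and release it when done, as described below
- * }
- * @endcode
- *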
The symbol can be released using - * ::cp_release_symbol when it is not needed anymore. Pointers obtained from - * this function must not be passed on to other plug-ins or the main - * program. - * - * When a plug-in runtime calls this function the plug-in framework creates - * a dynamic dependency from the symbol using plug-in to the symbol - * defining plug-in. The symbol using plug-in is stopped automatically if the - * symbol defining plug-in is about to be stopped. If the symbol using plug-in - * does not explicitly release the symbol then it is automatically released - * after a call to the stop function. It is not safe to refer to a dynamically - * resolved symbol in the stop function except to release it using - * ::cp_release_symbol. - * - * When the main program calls this function it is the responsibility of the - * main program to always release the symbol before the symbol defining plug-in - * is stopped. It is a fatal error if the symbol is not released before the - * symbol defining plug-in is stopped. - * - * @param ctx the plug-in context - * @param id the identifier of the symbol defining plug-in - * @param name the name of the symbol - * @param status a pointer to the location where the status code is to be stored, or NULL - * @return the pointer associated with the symbol or NULL on failure - */ -CP_C_API void *cp_resolve_symbol(cp_context_t *ctx, const char *id, const char *name, cp_status_t *status) CP_GCC_NONNULL(1, 2, 3); - -/** - * Releases a previously obtained symbol. The pointer must not be used after - * the symbol has been released. The symbol is released - * only after as many calls to this function as there have been for - * ::cp_resolve_symbol for the same plug-in and symbol. - * - * @param ctx the plug-in context - * @param ptr the pointer associated with the symbol - */ -CP_C_API void cp_release_symbol(cp_context_t *ctx, const void *ptr) CP_GCC_NONNULL(1, 2); - -/*@}*/ - - -#ifdef __cplusplus -} -#endif /*__cplusplus*/ - -#endif /*CPLUFF_H_*/ diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/cpluffdef.h b/xbmc/lib/cpluff-0.1.3/libcpluff/cpluffdef.h deleted file mode 100644 index c529386702..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/cpluffdef.h +++ /dev/null @@ -1,200 +0,0 @@ -/*------------------------------------------------------------------------- - * C-Pluff, a plug-in framework for C - * Copyright 2007 Johannes Lehtinen - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- *-----------------------------------------------------------------------*/ - -/** @file - * Common defines shared by C-Pluff C and C++ APIs. - * This file is automatically included by the top level C and C++ - * API header files. There should be no need to include it explicitly. - */ - -#ifndef CPLUFFDEF_H_ -#define CPLUFFDEF_H_ - - -/* ------------------------------------------------------------------------ - * Version information - * ----------------------------------------------------------------------*/ - -/** - * @defgroup versionInfo Version information - * @ingroup cDefines cxxDefines - * - * C-Pluff version information. Notice that this version information - * is static version information included in header files. The - * macros introduced here can be used for compile time checks. - */ -/*@{*/ - -/** - * The C-Pluff release version string. This string identifies a specific - * version of the C-Pluff distribution. Compile time software compatibility - * checks should use #CP_VERSION_MAJOR and #CP_VERSION_MINOR instead. - */ -#define CP_VERSION "0.1.3" - -/** - * The major version number component of the release version. This is an - * integer. - */ -#define CP_VERSION_MAJOR 0 - -/** - * The minor version number component of the release version. This is an - * integer. - */ -#define CP_VERSION_MINOR 1 - -/*@}*/ - - -/* ------------------------------------------------------------------------ - * Symbol visibility - * ----------------------------------------------------------------------*/ - -/** - * @defgroup symbolVisibility Symbol visibility - * @ingroup cDefines cxxDefines - * - * Macros for controlling inter-module symbol visibility and linkage. These - * macros have platform specific values. #CP_EXPORT, #CP_IMPORT and #CP_HIDDEN - * can be reused by plug-in implementations for better portability. The - * complexity is mostly due to Windows DLL exports and imports. - * - * @anchor symbolVisibilityExample - * Each module should usually define its own macro to declare API symbols with - * #CP_EXPORT and #CP_IMPORT as necessary. For example, a mobule could define - * a macro @c MY_API in the API header file as follows. - * - * @code - * #ifndef MY_API - * # define MY_API CP_IMPORT - * #endif - * @endcode - * - * By default the API symbols would then be marked for import which is correct - * when client modules are including the API header file. When compiling the - * module itself the option @c -DMY_API=CP_EXPORT would be passed to the compiler to - * override the API header file and to mark the API symbols for export. - * The overriding definition could also be included in module source files or - * in an internal header file before including the API header file. - */ -/*@{*/ - -/** - * @def CP_EXPORT - * - * Declares a symbol to be exported for inter-module usage. When compiling the - * module which defines the symbol this macro should be placed - * at the start of the symbol declaration to ensure that the symbol is exported - * to other modules. However, when compiling other modules the declaration of - * the symbol should start with #CP_IMPORT. - * See @ref symbolVisibilityExample "the example" of how to do this. - */ - -/** - * @def CP_IMPORT - * - * Declares a symbol to be imported from another module. When compiling a - * module which uses the symbol this macro should be placed at the start of - * the symbol declaration to ensure that the symbol is imported from the - * defining module. 
However, when compiling the defining module the declaration - * of the symbol should start with #CP_EXPORT. - * See @ref symbolVisibilityExample "the example" of how to do this. - */ - -/** - * @def CP_HIDDEN - * - * Declares a symbol hidden from other modules. This macro should be - * placed at the start of the symbol declaration to hide the symbol from other - * modules (if supported by the platform). This macro is not intended to be - * used with symbols declared as "static" which are already internal to the - * object file. Some platforms do not support hiding of symbols and therefore - * unique prefixes should be used for global symbols internal to the module - * even when they are declared using this macro. - */ - -#if defined(_WIN32) -# define CP_EXPORT __declspec(dllexport) -# define CP_IMPORT extern __declspec(dllimport) -# define CP_HIDDEN -#elif defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) -# define CP_EXPORT -# define CP_IMPORT extern -# define CP_HIDDEN __attribute__ ((visibility ("hidden"))) -#else -# define CP_EXPORT -# define CP_IMPORT extern -# define CP_HIDDEN -#endif - -/*@}*/ - - -/* ------------------------------------------------------------------------ - * GCC attributes - * ----------------------------------------------------------------------*/ - -/** - * @defgroup cDefinesGCCAttributes GCC attributes - * @ingroup cDefines cxxDefines - * - * These macros conditionally define GCC attributes for declarations. - * They are used in C-Pluff API declarations to enable better optimization - * and error checking when using GCC. In non-GCC platforms they have - * empty values. - */ -/*@{*/ - -/** - * @def CP_GCC_PURE - * - * Declares a function as pure function having no side effects. - * This attribute is supported in GCC since version 2.96. - * Such functions can be subject to common subexpression elimination - * and loop optimization. - */ - -/** - * @def CP_GCC_NONNULL - * - * Specifies that some pointer arguments to a function should have - * non-NULL values. Takes a variable length list of argument indexes as - * arguments. This attribute is supported in GCC since version 3.3. - * It can be used for enhanced error checking and some optimizations. - */ - -#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 96) -#define CP_GCC_PURE __attribute__((pure)) -#else -#define CP_GCC_PURE -#endif -#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 3) -#define CP_GCC_NONNULL(...) __attribute__((nonnull (__VA_ARGS__))) -#else -#define CP_GCC_NONNULL(...) -#endif - -/*@}*/ - -#endif /*CPLUFFDEF_H_*/ diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-impl b/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-impl deleted file mode 100644 index c896c32c52..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-impl +++ /dev/null @@ -1,1256 +0,0 @@ -# Doxyfile 1.5.1 - -# Copyright 2007 Johannes Lehtinen -# This configuration file is free software; Johannes Lehtinen gives unlimited -# permission to copy, distribute and modify it. - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "C-Pluff C Implementation" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = "0.1.3 (API version @CP_CORE_API_CURRENT@)" - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Finnish, French, German, Greek, Hungarian, -# Italian, Japanese, Japanese-en (Japanese with English messages), Korean, -# Korean-en, Lithuanian, Norwegian, Polish, Portuguese, Romanian, Russian, -# Serbian, Slovak, Slovene, Spanish, Swedish, and Ukrainian. - -OUTPUT_LANGUAGE = English - -# This tag can be used to specify the encoding used in the generated output. -# The encoding is not always determined by the language that is chosen, -# but also whether or not the output is meant for Windows or non-Windows users. -# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES -# forces the Windows encoding (this is the default for the Windows binary), -# whereas setting the tag to NO uses a Unix-style encoding (the default for -# all platforms other than Windows). - -USE_WINDOWS_ENCODING = NO - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. 
Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = YES - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like the Qt-style comments (thus requiring an -# explicit @brief command for a brief description. - -JAVADOC_AUTOBRIEF = YES - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the DETAILS_AT_TOP tag is set to YES then Doxygen -# will output the detailed description near the top, like JavaDoc. -# If set to NO, the detailed description appears after the member -# documentation. - -DETAILS_AT_TOP = YES - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
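A concrete (and purely hypothetical) comment makes the JAVADOC_AUTOBRIEF = YES setting above easier to picture: the first sentence becomes the brief description, while the rest only appears in the detailed section.

/**
 * Returns the number of currently loaded plug-ins. This second sentence
 * is only shown in the detailed description; with JAVADOC_AUTOBRIEF
 * enabled no explicit @brief command is needed for the first one.
 */
int example_loaded_plugin_count(void);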
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 8 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = YES - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for Java. -# For instance, namespaces will be presented as packages, qualified scopes -# will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to -# include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = YES - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. 
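The ALIASES description above uses "sideeffect=\par Side Effects:\n" as its example, although the tag itself is left empty in this configuration. With such an alias defined, a doc comment could use it like the hypothetical declaration below.

/**
 * Flushes the example cache.
 *
 * @sideeffect Any iterators obtained before the call become invalid.
 */
void example_flush_cache(void);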
- -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = YES - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = NO - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. 
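INTERNAL_DOCS = YES keeps \internal sections in this implementation-oriented output, whereas the API-reference Doxyfile further below sets it to NO. A hypothetical comment carrying such a section:

/**
 * Resolves a plug-in identifier to its runtime descriptor.
 *
 * @internal
 * Looks the identifier up directly in the internal hash table instead of
 * going through the public resolution API.
 */
struct example_descriptor *example_resolve(const char *id);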
- -SORT_BRIEF_DOCS = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = NO - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from the -# version control system). Doxygen will invoke the program by executing (via -# popen()) the command <command> <input-file>, where <command> is the value of -# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. 
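The GENERATE_TODOLIST, GENERATE_TESTLIST, GENERATE_BUGLIST and GENERATE_DEPRECATEDLIST switches above collect items written with the matching commands; a single hypothetical declaration can feed several of those lists at once.

/**
 * Parses an extension point descriptor.
 *
 * @todo Validate the schema version before parsing.
 * @bug Trailing whitespace in attribute values is not stripped.
 * @deprecated Use example_parse_descriptor_v2() instead.
 */
int example_parse_descriptor(const char *path);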
If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = . - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py - -FILE_PATTERNS = *.c *.h - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = NO - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. 
Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command <filter> <input-file>, where <filter> -# is the value of the INPUT_FILTER tag, and <input-file> is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES (the default) -# then for each documented function all documented -# functions referencing it will be listed. 
- -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES (the default) -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. Otherwise they will link to the documentstion. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = doxygen.footer - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. 
If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = doxygen.css - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compressed HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be -# generated containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = YES - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. 
- -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. 
- -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. This is useful -# if you want to understand what is going on. On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
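MACRO_EXPANSION = YES is relevant to a code base like this one because the attribute macros from cpluffdef.h would otherwise appear verbatim in the parsed declarations; with expansion enabled (and PREDEFINED left empty here) a declaration such as the hypothetical one below ends up documented as essentially a bare prototype.

#include "cpluffdef.h"

/* Doxygen's preprocessor expands CP_HIDDEN and CP_GCC_NONNULL(1) much as
 * the compiler would, so only the plain function signature is left in
 * the generated documentation. The function name is invented. */
CP_HIDDEN void example_register_logger(void (*handler)(const char *msg)) CP_GCC_NONNULL(1);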
- -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = YES - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = YES - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = NO - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = NO - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = NO - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will -# generate a call dependency graph for every global function or class method. -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable call graphs for selected -# functions only using the \callgraph command. - -CALL_GRAPH = YES - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then doxygen will -# generate a caller dependency graph for every global function or class method. -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable caller graphs for selected -# functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. 
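CALL_GRAPH and CALLER_GRAPH are both enabled above despite the note about run time; the per-function alternative mentioned there would look like the hypothetical declaration below, with the global switches left at NO.

/**
 * Starts all installed plug-ins.
 *
 * \callgraph
 * \callergraph
 */
void example_start_all_plugins(void);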
- -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -MAX_DOT_GRAPH_WIDTH = 1024 - -# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -MAX_DOT_GRAPH_HEIGHT = 1024 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that a graph may be further truncated if the graph's -# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH -# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default), -# the graph is not depth-constrained. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, which results in a white background. -# Warning: Depending on the platform used, enabling this option may lead to -# badly anti-aliased labels on the edges of a graph (i.e. they become hard to -# read). - -DOT_TRANSPARENT = YES - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = NO - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-ref b/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-ref deleted file mode 100644 index 337f26add2..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/docsrc/Doxyfile-ref +++ /dev/null @@ -1,1256 +0,0 @@ -# Doxyfile 1.5.1 - -# Copyright 2007 Johannes Lehtinen -# This configuration file is free software; Johannes Lehtinen gives unlimited -# permission to copy, distribute and modify it. 
- -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "C-Pluff C API" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = "0.1.3" - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Finnish, French, German, Greek, Hungarian, -# Italian, Japanese, Japanese-en (Japanese with English messages), Korean, -# Korean-en, Lithuanian, Norwegian, Polish, Portuguese, Romanian, Russian, -# Serbian, Slovak, Slovene, Spanish, Swedish, and Ukrainian. - -OUTPUT_LANGUAGE = English - -# This tag can be used to specify the encoding used in the generated output. -# The encoding is not always determined by the language that is chosen, -# but also whether or not the output is meant for Windows or non-Windows users. -# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES -# forces the Windows encoding (this is the default for the Windows binary), -# whereas setting the tag to NO uses a Unix-style encoding (the default for -# all platforms other than Windows). - -USE_WINDOWS_ENCODING = NO - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. 
- -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = YES - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like the Qt-style comments (thus requiring an -# explicit @brief command for a brief description. - -JAVADOC_AUTOBRIEF = YES - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the DETAILS_AT_TOP tag is set to YES then Doxygen -# will output the detailed description near the top, like JavaDoc. -# If set to NO, the detailed description appears after the member -# documentation. 
- -DETAILS_AT_TOP = YES - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 8 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = YES - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for Java. -# For instance, namespaces will be presented as packages, qualified scopes -# will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to -# include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = NO - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = NO - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = YES - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = NO - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. 
- -SORT_MEMBER_DOCS = NO - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 0 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = NO - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from the -# version control system). Doxygen will invoke the program by executing (via -# popen()) the command <command> <input-file>, where <command> is the value of -# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. 
-
-FILE_VERSION_FILTER =
-
-#---------------------------------------------------------------------------
-# configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated
-# by doxygen. Possible values are YES and NO. If left blank NO is used.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated by doxygen. Possible values are YES and NO. If left blank
-# NO is used.
-
-WARNINGS = YES
-
-# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
-# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
-# automatically be disabled.
-
-WARN_IF_UNDOCUMENTED = YES
-
-# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some
-# parameters in a documented function, or documenting parameters that
-# don't exist or using markup commands wrongly.
-
-WARN_IF_DOC_ERROR = YES
-
-# The WARN_NO_PARAMDOC option can be enabled to get warnings for
-# functions that are documented, but have no documentation for their parameters
-# or return value. If set to NO (the default) doxygen will only warn about
-# wrong or incomplete parameter documentation, but not about the absence of
-# documentation.
-
-WARN_NO_PARAMDOC = YES
-
-# The WARN_FORMAT tag determines the format of the warning messages that
-# doxygen can produce. The string should contain the $file, $line, and $text
-# tags, which will be replaced by the file and line number from which the
-# warning originated and the warning text. Optionally the format may contain
-# $version, which will be replaced by the version of the file (if it could
-# be obtained via FILE_VERSION_FILTER)
-
-WARN_FORMAT = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning
-# and error messages should be written. If left blank the output is written
-# to stderr.
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag can be used to specify the files and/or directories that contain
-# documented source files. You may enter file names like "myfile.cpp" or
-# directories like "/usr/src/myproject". Separate the files or directories
-# with spaces.
-
-INPUT = cpluffdef.h cpluff.h mainpage.dox architecture.dox mainprog.dox plugin.dox
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
-# and *.h) to filter out the source-files in the directories. If left
-# blank the following patterns are tested:
-# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
-# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py
-
-FILE_PATTERNS =
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories
-# should be searched for input files as well. Possible values are YES and NO.
-# If left blank NO is used.
-
-RECURSIVE = NO
-
-# The EXCLUDE tag can be used to specify files and/or directories that should
-# be excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
- -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = . - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command <filter> <input-file>, where <filter> -# is the value of the INPUT_FILTER tag, and <input-file> is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = NO - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. 
Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES (the default) -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES (the default) -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. Otherwise they will link to the documentstion. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. 
- -HTML_FOOTER = doxygen.footer - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = doxygen.css - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compressed HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be -# generated containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. 
- -GENERATE_LATEX = YES - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. 
The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. This is useful -# if you want to understand what is going on. On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = YES - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = CP_GCC_PURE CP_GCC_NONNULL CP_C_API - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = YES - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section
-# have no effect if this option is set to NO (the default)
-
-HAVE_DOT = YES
-
-# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect inheritance relations. Setting this tag to YES will force
-# the CLASS_DIAGRAMS tag to NO.
-
-CLASS_GRAPH = YES
-
-# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect implementation dependencies (inheritance, containment, and
-# class references variables) of the class with other documented classes.
-
-COLLABORATION_GRAPH = YES
-
-# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for groups, showing the direct group dependencies
-
-GROUP_GRAPHS = NO
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-
-UML_LOOK = NO
-
-# If set to YES, the inheritance and collaboration graphs will show the
-# relations between templates and their instances.
-
-TEMPLATE_RELATIONS = NO
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
-# tags are set to YES then doxygen will generate a graph for each documented
-# file showing the direct and indirect include dependencies of the file with
-# other documented files.
-
-INCLUDE_GRAPH = YES
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
-# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
-# documented header file showing the documented files that directly or
-# indirectly include this file.
-
-INCLUDED_BY_GRAPH = YES
-
-# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will
-# generate a call dependency graph for every global function or class method.
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-
-CALL_GRAPH = NO
-
-# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then doxygen will
-# generate a caller dependency graph for every global function or class method.
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-
-CALLER_GRAPH = NO
-
-# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
-# will show a graphical hierarchy of all classes instead of a textual one.
-
-GRAPHICAL_HIERARCHY = YES
-
-# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
-# then doxygen will show the dependencies a directory has on other directories
-# in a graphical way. The dependency relations are determined by the #include
-# relations between the files in the directories.
-
-DIRECTORY_GRAPH = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot. Possible values are png, jpg, or gif.
-# If left blank png will be used.
-
-DOT_IMAGE_FORMAT = png
-
-# The tag DOT_PATH can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-
-DOT_PATH =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the
-# \dotfile command).
-
-DOTFILE_DIRS =
-
-# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
-# (in pixels) of the graphs generated by dot. If a graph becomes larger than
-# this value, doxygen will try to truncate the graph, so that it fits within
-# the specified constraint. Beware that most browsers cannot cope with very
-# large images.
-
-MAX_DOT_GRAPH_WIDTH = 1024
-
-# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allowed height
-# (in pixels) of the graphs generated by dot. If a graph becomes larger than
-# this value, doxygen will try to truncate the graph, so that it fits within
-# the specified constraint. Beware that most browsers cannot cope with very
-# large images.
-
-MAX_DOT_GRAPH_HEIGHT = 1024
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
-# graphs generated by dot. A depth value of 3 means that only nodes reachable
-# from the root by following a path via at most 3 edges will be shown. Nodes
-# that lie further from the root node will be omitted. Note that setting this
-# option to 1 or 2 may greatly reduce the computation time needed for large
-# code bases. Also note that a graph may be further truncated if the graph's
-# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH
-# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default),
-# the graph is not depth-constrained.
-
-MAX_DOT_GRAPH_DEPTH = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, which results in a white background.
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-
-DOT_TRANSPARENT = YES
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10)
-# support this, this feature is disabled by default.
-
-DOT_MULTI_TARGETS = NO
-
-# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
-# generate a legend page explaining the meaning of the various boxes and
-# arrows in the dot generated graphs.
-
-GENERATE_LEGEND = YES
-
-# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
-# remove the intermediate dot files that are used to generate
-# the various graphs.
-
-DOT_CLEANUP = YES
-
-#---------------------------------------------------------------------------
-# Configuration::additions related to the search engine
-#---------------------------------------------------------------------------
-
-# The SEARCHENGINE tag specifies whether or not a search engine should be
-# used. If set to NO the values of all tags below this one will be ignored.
- -SEARCHENGINE = NO diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/pcontrol.c b/xbmc/lib/cpluff-0.1.3/libcpluff/pcontrol.c deleted file mode 100644 index c5a3310a0d..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/pcontrol.c +++ /dev/null @@ -1,1241 +0,0 @@ -/*------------------------------------------------------------------------- - * C-Pluff, a plug-in framework for C - * Copyright 2007 Johannes Lehtinen - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - *-----------------------------------------------------------------------*/ - -/** @file - * Core plug-in management functions - */ - -#ifdef HAVE_CONFIG_H -#include "config.h" -#endif - -#include <stdio.h> -#include <stdlib.h> -#include <assert.h> -#include <string.h> -#include <stddef.h> -#include "../kazlib/list.h" -#include "../kazlib/hash.h" -#include "cpluff.h" -#include "defines.h" -#include "util.h" -#include "internal.h" - - -/* ------------------------------------------------------------------------ - * Function definitions - * ----------------------------------------------------------------------*/ - -// Plug-in control - -#ifndef NDEBUG -static void assert_processed_zero(cp_context_t *context) { - hscan_t scan; - hnode_t *node; - - hash_scan_begin(&scan, context->env->plugins); - while ((node = hash_scan_next(&scan)) != NULL) { - cp_plugin_t *plugin = hnode_get(node); - assert(plugin->processed == 0); - } -} -#else -#define assert_processed_zero(c) assert(1) -#endif - -static void unregister_extensions(cp_context_t *context, cp_plugin_info_t *plugin) { - int i; - - for (i = 0; i < plugin->num_ext_points; i++) { - cp_ext_point_t *ep = plugin->ext_points + i; - hnode_t *hnode; - - if ((hnode = hash_lookup(context->env->ext_points, ep->identifier)) != NULL - && hnode_get(hnode) == ep) { - hash_delete_free(context->env->ext_points, hnode); - } - } - for (i = 0; i < plugin->num_extensions; i++) { - cp_extension_t *e = plugin->extensions + i; - hnode_t *hnode; - - if ((hnode = hash_lookup(context->env->extensions, e->ext_point_id)) != NULL) { - list_t *el = hnode_get(hnode); - lnode_t *lnode = list_first(el); - - while (lnode != NULL) { - lnode_t *nn = list_next(el, lnode); - if (lnode_get(lnode) == e) { - list_delete(el, lnode); - lnode_destroy(lnode); - break; - } - lnode = nn; - } - if (list_isempty(el)) { - char *epid = (char *) hnode_getkey(hnode); - hash_delete_free(context->env->extensions, hnode); - free(epid); - list_destroy(el); - } - } - } -} - -CP_C_API cp_status_t cp_install_plugin(cp_context_t 
*context, cp_plugin_info_t *plugin) { - cp_plugin_t *rp = NULL; - cp_status_t status = CP_OK; - cpi_plugin_event_t event; - int i; - - CHECK_NOT_NULL(context); - CHECK_NOT_NULL(plugin); - - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - do { - - // Check that there is no conflicting plug-in already loaded - if (hash_lookup(context->env->plugins, plugin->identifier) != NULL) { - cpi_errorf(context, - N_("Plug-in %s could not be installed because a plug-in with the same identifier is already installed."), - plugin->identifier); - status = CP_ERR_CONFLICT; - break; - } - - // Increase usage count for the plug-in descriptor - cpi_use_info(context, plugin); - - // Allocate space for the plug-in state - if ((rp = malloc(sizeof(cp_plugin_t))) == NULL) { - status = CP_ERR_RESOURCE; - break; - } - - // Initialize plug-in state - memset(rp, 0, sizeof(cp_plugin_t)); - rp->context = NULL; - rp->plugin = plugin; - rp->state = CP_PLUGIN_INSTALLED; - rp->imported = NULL; - rp->runtime_lib = NULL; - rp->runtime_funcs = NULL; - rp->plugin_data = NULL; - rp->importing = list_create(LISTCOUNT_T_MAX); - if (rp->importing == NULL) { - status = CP_ERR_RESOURCE; - break; - } - if (!hash_alloc_insert(context->env->plugins, plugin->identifier, rp)) { - status = CP_ERR_RESOURCE; - break; - } - - // Register extension points - for (i = 0; status == CP_OK && i < plugin->num_ext_points; i++) { - cp_ext_point_t *ep = plugin->ext_points + i; - hnode_t *hnode; - - if ((hnode = hash_lookup(context->env->ext_points, ep->identifier)) != NULL) { - cpi_errorf(context, N_("Plug-in %s could not be installed because extension point %s conflicts with an already installed extension point."), plugin->identifier, ep->identifier); - status = CP_ERR_CONFLICT; - } else if (!hash_alloc_insert(context->env->ext_points, ep->identifier, ep)) { - status = CP_ERR_RESOURCE; - } - } - - // Register extensions - for (i = 0; status == CP_OK && i < plugin->num_extensions; i++) { - cp_extension_t *e = plugin->extensions + i; - hnode_t *hnode; - lnode_t *lnode; - list_t *el; - - if ((hnode = hash_lookup(context->env->extensions, e->ext_point_id)) == NULL) { - char *epid; - if ((el = list_create(LISTCOUNT_T_MAX)) != NULL - && (epid = strdup(e->ext_point_id)) != NULL) { - if (!hash_alloc_insert(context->env->extensions, epid, el)) { - list_destroy(el); - status = CP_ERR_RESOURCE; - break; - } - } else { - if (el != NULL) { - list_destroy(el); - } - status = CP_ERR_RESOURCE; - break; - } - } else { - el = hnode_get(hnode); - } - if ((lnode = lnode_create(e)) != NULL) { - list_append(el, lnode); - } else { - status = CP_ERR_RESOURCE; - break; - } - } - - // Break if previous loops failed - if (status != CP_OK) { - break; - } - - // Plug-in installed - event.plugin_id = plugin->identifier; - event.old_state = CP_PLUGIN_UNINSTALLED; - event.new_state = rp->state; - cpi_deliver_event(context, &event); - - } while (0); - - // Release resources on failure - if (status != CP_OK) { - if (rp != NULL) { - if (rp->importing != NULL) { - list_destroy(rp->importing); - } - free(rp); - } - unregister_extensions(context, plugin); - } - - // Report possible resource error - if (status == CP_ERR_RESOURCE) { - cpi_errorf(context, - N_("Plug-in %s could not be installed due to insufficient system resources."), plugin->identifier); - } - cpi_unlock_context(context); - - return status; -} - -/** - * Unresolves the plug-in runtime information. 
- * - * @param plugin the plug-in to unresolve - */ -static void unresolve_plugin_runtime(cp_plugin_t *plugin) { - - // Destroy the plug-in instance, if necessary - if (plugin->context != NULL) { - plugin->context->env->in_destroy_func_invocation++; - plugin->runtime_funcs->destroy(plugin->plugin_data); - plugin->context->env->in_destroy_func_invocation--; - plugin->plugin_data = NULL; - cpi_free_context(plugin->context); - plugin->context = NULL; - } - - // Close plug-in runtime library - plugin->runtime_funcs = NULL; - if (plugin->runtime_lib != NULL) { - DLCLOSE(plugin->runtime_lib); - plugin->runtime_lib = NULL; - } -} - -/** - * Loads and resolves the plug-in runtime library and initialization functions. - * - * @param context the plug-in context - * @param plugin the plugin - * @return CP_OK (zero) on success or error code on failure - */ -static int resolve_plugin_runtime(cp_context_t *context, cp_plugin_t *plugin) { - char *rlpath = NULL; - int rlpath_len; - cp_status_t status = CP_OK; - - assert(plugin->runtime_lib == NULL); - if (plugin->plugin->runtime_lib_name == NULL) { - return CP_OK; - } - - do { - int ppath_len, lname_len; - int cpluff_compatibility = 1; - - // Check C-Pluff compatibility - if (plugin->plugin->req_cpluff_version != NULL) { -#ifdef CP_ABI_COMPATIBILITY - cpluff_compatibility = ( - cpi_vercmp(plugin->plugin->req_cpluff_version, CP_VERSION) <= 0 - && cpi_vercmp(plugin->plugin->req_cpluff_version, CP_ABI_COMPATIBILITY) >= 0); -#else - cpluff_compatibility = (cpi_vercmp(plugin->plugin->req_cpluff_version, CP_VERSION) == 0); -#endif - } - if (!cpluff_compatibility) { - cpi_errorf(context, N_("Plug-in %s could not be resolved due to version incompatibility with C-Pluff."), plugin->plugin->identifier); - status = CP_ERR_DEPENDENCY; - break; - } - - // Construct a path to plug-in runtime library. 
- /// @todo Add platform specific prefix (for example, "lib") - ppath_len = strlen(plugin->plugin->plugin_path); - lname_len = strlen(plugin->plugin->runtime_lib_name); - rlpath_len = ppath_len + lname_len + strlen(CP_SHREXT) + 2; - if ((rlpath = malloc(rlpath_len * sizeof(char))) == NULL) { - cpi_errorf(context, N_("Plug-in %s runtime library could not be loaded due to insufficient memory."), plugin->plugin->identifier); - status = CP_ERR_RESOURCE; - break; - } - memset(rlpath, 0, rlpath_len * sizeof(char)); - strcpy(rlpath, plugin->plugin->plugin_path); - rlpath[ppath_len] = CP_FNAMESEP_CHAR; - strcpy(rlpath + ppath_len + 1, plugin->plugin->runtime_lib_name); - strcpy(rlpath + ppath_len + 1 + lname_len, CP_SHREXT); - - // Open the plug-in runtime library - plugin->runtime_lib = DLOPEN(rlpath); - if (plugin->runtime_lib == NULL) { - const char *error = DLERROR(); - if (error == NULL) { - error = _("Unspecified error."); - } - cpi_errorf(context, N_("Plug-in %s runtime library %s could not be opened: %s"), plugin->plugin->identifier, rlpath, error); - status = CP_ERR_RUNTIME; - break; - } - - // Resolve plug-in functions - if (plugin->plugin->runtime_funcs_symbol != NULL) { - plugin->runtime_funcs = (cp_plugin_runtime_t *) DLSYM(plugin->runtime_lib, plugin->plugin->runtime_funcs_symbol); - if (plugin->runtime_funcs == NULL) { - const char *error = DLERROR(); - if (error == NULL) { - error = _("Unspecified error."); - } - cpi_errorf(context, N_("Plug-in %s symbol %s containing plug-in runtime information could not be resolved: %s"), plugin->plugin->identifier, plugin->plugin->runtime_funcs_symbol, error); - status = CP_ERR_RUNTIME; - break; - } - if (plugin->runtime_funcs->create == NULL - || plugin->runtime_funcs->destroy == NULL) { - cpi_errorf(context, N_("Plug-in %s is missing a constructor or destructor function."), plugin->plugin->identifier); - status = CP_ERR_RUNTIME; - break; - } - } - - } while (0); - - // Release resources - free(rlpath); - if (status != CP_OK) { - unresolve_plugin_runtime(plugin); - } - - return status; -} - -/** - * Resolves the specified plug-in import into a plug-in pointer. Does not - * try to resolve the imported plug-in. 
- * - * @param context the plug-in context - * @param plugin the plug-in being resolved - * @param import the plug-in import to resolve - * @param ipptr filled with pointer to the resolved plug-in or NULL - * @return CP_OK on success or error code on failure - */ -static int resolve_plugin_import(cp_context_t *context, cp_plugin_t *plugin, cp_plugin_import_t *import, cp_plugin_t **ipptr) { - cp_plugin_t *ip = NULL; - hnode_t *node; - - // Lookup the plug-in - node = hash_lookup(context->env->plugins, import->plugin_id); - if (node != NULL) { - ip = hnode_get(node); - } - - // Check plug-in version - if (ip != NULL - && import->version != NULL - && (ip->plugin->version == NULL - || (ip->plugin->abi_bw_compatibility == NULL - && cpi_vercmp(import->version, ip->plugin->version) != 0) - || (ip->plugin->abi_bw_compatibility != NULL - && (cpi_vercmp(import->version, ip->plugin->version) > 0 - || cpi_vercmp(import->version, ip->plugin->abi_bw_compatibility) < 0)))) { - cpi_errorf(context, - N_("Plug-in %s could not be resolved due to version incompatibility with plug-in %s."), - plugin->plugin->identifier, - import->plugin_id); - *ipptr = NULL; - return CP_ERR_DEPENDENCY; - } - - // Check if missing mandatory plug-in - if (ip == NULL && !import->optional) { - cpi_errorf(context, - N_("Plug-in %s could not be resolved because it depends on plug-in %s which is not installed."), - plugin->plugin->identifier, - import->plugin_id); - *ipptr = NULL; - return CP_ERR_DEPENDENCY; - } - - // Return imported plug-in - *ipptr = ip; - return CP_OK; -} - -/** - * Resolves the specified plug-in and its dependencies while leaving plug-ins - * with circular dependencies in a preliminarily resolved state. - * - * @param context the plug-in context - * @param plugin the plug-in - * @return CP_OK (zero) or CP_OK_PRELIMINARY or an error code - */ -static int resolve_plugin_prel_rec(cp_context_t *context, cp_plugin_t *plugin) { - cp_status_t status = CP_OK; - int error_reported = 0; - lnode_t *node = NULL; - int i; - - // Check if already resolved - if (plugin->state >= CP_PLUGIN_RESOLVED) { - return CP_OK; - } - - // Check for dependency loops - if (plugin->processed) { - return CP_OK_PRELIMINARY; - } - plugin->processed = 1; - - do { - - // Recursively resolve the imported plug-ins - assert(plugin->imported == NULL); - if ((plugin->imported = list_create(LISTCOUNT_T_MAX)) == NULL) { - status = CP_ERR_RESOURCE; - break; - } - for (i = 0; i < plugin->plugin->num_imports; i++) { - cp_plugin_t *ip; - int s; - - if ((node = lnode_create(NULL)) == NULL) { - status = CP_ERR_RESOURCE; - break; - } - if ((s = resolve_plugin_import(context, plugin, plugin->plugin->imports + i, &ip)) != CP_OK) { - error_reported = 1; - status = s; - break; - } - if (ip != NULL) { - lnode_put(node, ip); - list_append(plugin->imported, node); - node = NULL; - if (!cpi_ptrset_add(ip->importing, plugin)) { - status = CP_ERR_RESOURCE; - break; - } else if ((s = resolve_plugin_prel_rec(context, ip)) != CP_OK && s != CP_OK_PRELIMINARY) { - cpi_errorf(context, N_("Plug-in %s could not be resolved because it depends on plug-in %s which could not be resolved."), plugin->plugin->identifier, ip->plugin->identifier); - error_reported = 1; - status = s; - break; - } - } else { - lnode_destroy(node); - node = NULL; - } - } - if (status != CP_OK) { - break; - } - - // Resolve this plug-in - assert(plugin->state == CP_PLUGIN_INSTALLED); - if ((i = resolve_plugin_runtime(context, plugin)) != CP_OK) { - status = i; - error_reported = 1; - break; - } - - // 
Notify event listeners and update state if completely resolved - if (status == CP_OK) { - cpi_plugin_event_t event; - - plugin->processed = 0; - event.plugin_id = plugin->plugin->identifier; - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_RESOLVED; - cpi_deliver_event(context, &event); - } - - } while (0); - - // Clean up - if (node != NULL) { - lnode_destroy(node); - } - - // Handle errors - if (status == CP_ERR_RESOURCE && !error_reported) { - cpi_errorf(context, N_("Plug-in %s could not be resolved because of insufficient memory."), plugin->plugin->identifier); - } - - return status; -} - -/** - * Recursively commits the resolving process for the specified plug-in and - * its dependencies. - * - * @param context the plug-in context - * @param plugin the plug-in - */ -static void resolve_plugin_commit_rec(cp_context_t *context, cp_plugin_t *plugin) { - - // Check if already committed - if (!plugin->processed) { - return; - } - plugin->processed = 0; - - // Commit if only preliminarily resolved - if (plugin->state < CP_PLUGIN_RESOLVED) { - cpi_plugin_event_t event; - lnode_t *node; - - // Recursively commit dependencies - node = list_first(plugin->imported); - while (node != NULL) { - resolve_plugin_commit_rec(context, (cp_plugin_t *) lnode_get(node)); - node = list_next(plugin->imported, node); - } - - // Notify event listeners and update state - event.plugin_id = plugin->plugin->identifier; - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_RESOLVED; - cpi_deliver_event(context, &event); - } -} - -/** - * Recursively cleans up the specified plug-in and its dependencies after - * a failed resolving attempt. - * - * @param plugin the plug-in - */ -static void resolve_plugin_failed_rec(cp_plugin_t *plugin) { - - // Check if already cleaned up - if (!plugin->processed) { - return; - } - plugin->processed = 0; - - // Clean up if only preliminarily resolved - if (plugin->state < CP_PLUGIN_RESOLVED) { - lnode_t *node; - - // Recursively clean up depedencies - while ((node = list_first(plugin->imported)) != NULL) { - cp_plugin_t *ip = lnode_get(node); - - resolve_plugin_failed_rec(ip); - cpi_ptrset_remove(ip->importing, plugin); - list_delete(plugin->imported, node); - lnode_destroy(node); - } - list_destroy(plugin->imported); - plugin->imported = NULL; - } -} - -/** - * Resolves the specified plug-in and its dependencies. - * - * @param context the plug-in context - * @param plugin the plug-in to be resolved - * @return CP_OK (zero) on success or an error code on failure - */ -static int resolve_plugin(cp_context_t *context, cp_plugin_t *plugin) { - cp_status_t status; - - if ((status = resolve_plugin_prel_rec(context, plugin)) == CP_OK || status == CP_OK_PRELIMINARY) { - status = CP_OK; - resolve_plugin_commit_rec(context, plugin); - } else { - resolve_plugin_failed_rec(plugin); - } - assert_processed_zero(context); - return status; -} - -/** - * Starts the plug-in runtime of the specified plug-in. This function does - * not consider dependencies and assumes that the plug-in is resolved but - * not yet started. 
- * - * @param context the plug-in context - * @param plugin the plug-in - * @return CP_OK (zero) on success or an error code on failure - */ -static int start_plugin_runtime(cp_context_t *context, cp_plugin_t *plugin) { - cp_status_t status = CP_OK; - cpi_plugin_event_t event; - lnode_t *node = NULL; - - event.plugin_id = plugin->plugin->identifier; - do { - - // Allocate space for the list node - node = lnode_create(plugin); - if (node == NULL) { - status = CP_ERR_RESOURCE; - break; - } - - // Set up plug-in instance - if (plugin->runtime_funcs != NULL) { - - // Create plug-in instance if necessary - if (plugin->context == NULL) { - if ((plugin->context = cpi_new_context(plugin, context->env, &status)) == NULL) { - break; - } - context->env->in_create_func_invocation++; - plugin->plugin_data = plugin->runtime_funcs->create(plugin->context); - context->env->in_create_func_invocation--; - if (plugin->plugin_data == NULL) { - status = CP_ERR_RUNTIME; - break; - } - } - - // Start plug-in - if (plugin->runtime_funcs->start != NULL) { - int s; - - // About to start the plug-in - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_STARTING; - cpi_deliver_event(context, &event); - - // Start the plug-in - context->env->in_start_func_invocation++; - s = plugin->runtime_funcs->start(plugin->plugin_data); - context->env->in_start_func_invocation--; - - if (s != CP_OK) { - - // Roll back plug-in state - if (plugin->runtime_funcs->stop != NULL) { - - // Update state - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_STOPPING; - cpi_deliver_event(context, &event); - - // Call stop function - context->env->in_stop_func_invocation++; - plugin->runtime_funcs->stop(plugin->plugin_data); - context->env->in_stop_func_invocation--; - } - - // Destroy plug-in object - context->env->in_destroy_func_invocation++; - plugin->runtime_funcs->destroy(plugin->plugin_data); - context->env->in_destroy_func_invocation--; - - status = CP_ERR_RUNTIME; - break; - } - } - } - - // Plug-in active - list_append(context->env->started_plugins, node); - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_ACTIVE; - cpi_deliver_event(context, &event); - - } while (0); - - // Release resources and roll back plug-in state on failure - if (status != CP_OK) { - if (node != NULL) { - lnode_destroy(node); - } - if (plugin->context != NULL) { - cpi_free_context(plugin->context); - plugin->context = NULL; - } - if (plugin->state != CP_PLUGIN_RESOLVED) { - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_RESOLVED; - cpi_deliver_event(context, &event); - } - plugin->plugin_data = NULL; - } - - // Report error on failure - switch (status) { - case CP_ERR_RESOURCE: - cpi_errorf(context, - N_("Plug-in %s could not be started due to insufficient memory."), - plugin->plugin->identifier); - break; - case CP_ERR_RUNTIME: - cpi_errorf(context, - N_("Plug-in %s failed to start due to plug-in runtime error."), - plugin->plugin->identifier); - break; - default: - break; - } - - return status; -} - -static void warn_dependency_loop(cp_context_t *context, cp_plugin_t *plugin, list_t *importing, int dynamic) { - char *msgbase; - char *msg; - int msgsize; - lnode_t *node; - - // Take the message base - if (dynamic) { - msgbase = N_("Detected a runtime plug-in dependency loop: %s"); - } else { - msgbase = N_("Detected a static plug-in dependency loop: %s"); - } - - // Calculate the required message space - msgsize = 0; - msgsize += 
strlen(plugin->plugin->identifier); - msgsize += 2; - node = list_last(importing); - while (node != NULL) { - cp_plugin_t *p = lnode_get(node); - if (p == plugin) { - break; - } - msgsize += strlen(p->plugin->identifier); - msgsize += 2; - node = list_prev(importing, node); - } - msg = malloc(sizeof(char) * msgsize); - if (msg != NULL) { - strcpy(msg, plugin->plugin->identifier); - node = list_last(importing); - while (node != NULL) { - cp_plugin_t *p = lnode_get(node); - if (p == plugin) { - break; - } - strcat(msg, ", "); - strcat(msg, p->plugin->identifier); - node = list_prev(importing, node); - } - strcat(msg, "."); - cpi_infof(context, msgbase, msg); - free(msg); - } else { - cpi_infof(context, msgbase, plugin->plugin->identifier); - } -} - -/** - * Starts the specified plug-in and its dependencies. - * - * @param context the plug-in context - * @param plugin the plug-in - * @param importing stack of importing plug-ins - * @return CP_OK (zero) on success or an error code on failure - */ -static int start_plugin_rec(cp_context_t *context, cp_plugin_t *plugin, list_t *importing) { - cp_status_t status = CP_OK; - lnode_t *node; - - // Check if already started or starting - if (plugin->state == CP_PLUGIN_ACTIVE) { - return CP_OK; - } else if (plugin->state == CP_PLUGIN_STARTING) { - warn_dependency_loop(context, plugin, importing, 1); - return CP_OK; - } - assert(plugin->state == CP_PLUGIN_RESOLVED); - - // Check for dependency loops - if (cpi_ptrset_contains(importing, plugin)) { - warn_dependency_loop(context, plugin, importing, 0); - return CP_OK; - } - if (!cpi_ptrset_add(importing, plugin)) { - cpi_errorf(context, - N_("Plug-in %s could not be started due to insufficient memory."), - plugin->plugin->identifier); - return CP_ERR_RESOURCE; - } - - // Start up dependencies - node = list_first(plugin->imported); - while (node != NULL) { - cp_plugin_t *ip = lnode_get(node); - - if ((status = start_plugin_rec(context, ip, importing)) != CP_OK) { - break; - } - node = list_next(plugin->imported, node); - } - cpi_ptrset_remove(importing, plugin); - - // Start up this plug-in - if (status == CP_OK) { - status = start_plugin_runtime(context, plugin); - } - - return status; -} - -CP_HIDDEN cp_status_t cpi_start_plugin(cp_context_t *context, cp_plugin_t *plugin) { - cp_status_t status; - - if ((status = resolve_plugin(context, plugin)) == CP_OK) { - list_t *importing = list_create(LISTCOUNT_T_MAX); - if (importing != NULL) { - status = start_plugin_rec(context, plugin, importing); - assert(list_isempty(importing)); - list_destroy(importing); - } else { - cpi_errorf(context, - N_("Plug-in %s could not be started due to insufficient memory."), - plugin->plugin->identifier); - status = CP_ERR_RESOURCE; - } - } - return status; -} - -CP_C_API cp_status_t cp_start_plugin(cp_context_t *context, const char *id) { - hnode_t *node; - cp_status_t status = CP_OK; - - CHECK_NOT_NULL(context); - CHECK_NOT_NULL(id); - - // Look up and start the plug-in - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - node = hash_lookup(context->env->plugins, id); - if (node != NULL) { - status = cpi_start_plugin(context, hnode_get(node)); - } else { - cpi_warnf(context, N_("Unknown plug-in %s could not be started."), id); - status = CP_ERR_UNKNOWN; - } - cpi_unlock_context(context); - - return status; -} - -/** - * Stops the plug-in runtime of the specified plug-in. This function does - * not consider dependencies and assumes that the plug-in is active. 
- * - * @param context the plug-in context - * @param plugin the plug-in - */ -static void stop_plugin_runtime(cp_context_t *context, cp_plugin_t *plugin) { - cpi_plugin_event_t event; - - // Destroy plug-in instance - event.plugin_id = plugin->plugin->identifier; - if (plugin->context != NULL) { - - // Wait until possible run functions have stopped - cpi_stop_plugin_run(plugin); - - // Stop the plug-in - if (plugin->runtime_funcs->stop != NULL) { - - // About to stop the plug-in - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_STOPPING; - cpi_deliver_event(context, &event); - - // Invoke stop function - context->env->in_stop_func_invocation++; - plugin->runtime_funcs->stop(plugin->plugin_data); - context->env->in_stop_func_invocation--; - - } - - // Unregister all logger functions - cpi_unregister_loggers(plugin->context->env->loggers, plugin); - - // Unregister all plug-in listeners - cpi_unregister_plisteners(plugin->context->env->plugin_listeners, plugin); - - // Release resolved symbols - if (plugin->context->resolved_symbols != NULL) { - while (!hash_isempty(plugin->context->resolved_symbols)) { - hscan_t scan; - hnode_t *node; - const void *ptr; - - hash_scan_begin(&scan, plugin->context->resolved_symbols); - node = hash_scan_next(&scan); - ptr = hnode_getkey(node); - cp_release_symbol(context, ptr); - } - assert(hash_isempty(plugin->context->resolved_symbols)); - } - if (plugin->context->symbol_providers != NULL) { - assert(hash_isempty(plugin->context->symbol_providers)); - } - - // Release defined symbols - if (plugin->defined_symbols != NULL) { - hscan_t scan; - hnode_t *node; - - hash_scan_begin(&scan, plugin->defined_symbols); - while ((node = hash_scan_next(&scan)) != NULL) { - char *n = (char *) hnode_getkey(node); - hash_scan_delfree(plugin->defined_symbols, node); - free(n); - } - hash_destroy(plugin->defined_symbols); - plugin->defined_symbols = NULL; - } - - } - - // Plug-in stopped - cpi_ptrset_remove(context->env->started_plugins, plugin); - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_RESOLVED; - cpi_deliver_event(context, &event); -} - -/** - * Stops the plug-in and all plug-ins depending on it. 
- * - * @param context the plug-in context - * @param plugin the plug-in - */ -static void stop_plugin_rec(cp_context_t *context, cp_plugin_t *plugin) { - lnode_t *node; - - // Check if already stopped - if (plugin->state < CP_PLUGIN_ACTIVE) { - return; - } - - // Check for dependency loops - if (plugin->processed) { - return; - } - plugin->processed = 1; - - // Stop the depending plug-ins - node = list_first(plugin->importing); - while (node != NULL) { - stop_plugin_rec(context, lnode_get(node)); - node = list_next(plugin->importing, node); - } - - // Stop this plug-in - assert(plugin->state == CP_PLUGIN_ACTIVE); - stop_plugin_runtime(context, plugin); - assert(plugin->state < CP_PLUGIN_ACTIVE); - - // Clear processed flag - plugin->processed = 0; -} - -static void stop_plugin(cp_context_t *context, cp_plugin_t *plugin) { - stop_plugin_rec(context, plugin); - assert_processed_zero(context); -} - -CP_C_API cp_status_t cp_stop_plugin(cp_context_t *context, const char *id) { - hnode_t *node; - cp_plugin_t *plugin; - cp_status_t status = CP_OK; - - CHECK_NOT_NULL(context); - CHECK_NOT_NULL(id); - - // Look up and stop the plug-in - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - node = hash_lookup(context->env->plugins, id); - if (node != NULL) { - plugin = hnode_get(node); - stop_plugin(context, plugin); - } else { - cpi_warnf(context, N_("Unknown plug-in %s could not be stopped."), id); - status = CP_ERR_UNKNOWN; - } - cpi_unlock_context(context); - - return status; -} - -CP_C_API void cp_stop_plugins(cp_context_t *context) { - lnode_t *node; - - CHECK_NOT_NULL(context); - - // Stop the active plug-ins in the reverse order they were started - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - while ((node = list_last(context->env->started_plugins)) != NULL) { - stop_plugin(context, lnode_get(node)); - } - cpi_unlock_context(context); -} - -static void unresolve_plugin_rec(cp_context_t *context, cp_plugin_t *plugin) { - lnode_t *node; - cpi_plugin_event_t event; - - // Check if already unresolved - if (plugin->state < CP_PLUGIN_RESOLVED) { - return; - } - assert(plugin->state == CP_PLUGIN_RESOLVED); - - // Clear the list of imported plug-ins (also breaks dependency loops) - while ((node = list_first(plugin->imported)) != NULL) { - cp_plugin_t *ip = lnode_get(node); - - cpi_ptrset_remove(ip->importing, plugin); - list_delete(plugin->imported, node); - lnode_destroy(node); - } - assert(list_isempty(plugin->imported)); - list_destroy(plugin->imported); - plugin->imported = NULL; - - // Unresolve depending plugins - while ((node = list_first(plugin->importing)) != NULL) { - unresolve_plugin_rec(context, lnode_get(node)); - } - - // Unresolve this plug-in - unresolve_plugin_runtime(plugin); - event.plugin_id = plugin->plugin->identifier; - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_INSTALLED; - cpi_deliver_event(context, &event); -} - -/** - * Unresolves a plug-in. 
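The functions deleted above form C-Pluff's public start/stop surface: cp_start_plugin resolves a plug-in and its imports before starting them bottom-up, cp_stop_plugin stops the plug-ins that import it first, and cp_stop_plugins stops everything in reverse start order. A minimal caller-side sketch in C, using only the signatures visible in this deleted file; the plug-in identifier is invented and ctx is assumed to be a context created through the regular C-Pluff public API, which is not part of this hunk:

#include <stdio.h>
#include "cpluff.h"

/* Illustrative sketch only: "org.example.addon" is a made-up identifier
 * and ctx is assumed to come from the normal C-Pluff context setup. */
static void cycle_example_plugin(cp_context_t *ctx)
{
    cp_status_t status;

    /* Resolves the plug-in and its imports, then starts them bottom-up
     * (cpi_start_plugin -> resolve_plugin -> start_plugin_rec above). */
    status = cp_start_plugin(ctx, "org.example.addon");
    if (status != CP_OK) {
        fprintf(stderr, "start failed: %d\n", (int) status);
        return;
    }

    /* Stops plug-ins importing this one before the plug-in itself
     * (stop_plugin_rec walks plugin->importing). */
    status = cp_stop_plugin(ctx, "org.example.addon");
    if (status != CP_OK)
        fprintf(stderr, "stop failed: %d\n", (int) status);

    /* Stops any remaining active plug-ins in reverse start order. */
    cp_stop_plugins(ctx);
}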
- * - * @param context the plug-in context - * @param plug-in the plug-in to be unresolved - */ -static void unresolve_plugin(cp_context_t *context, cp_plugin_t *plugin) { - stop_plugin(context, plugin); - unresolve_plugin_rec(context, plugin); -} - -static void free_plugin_import_content(cp_plugin_import_t *import) { - assert(import != NULL); - free(import->plugin_id); - free(import->version); -} - -static void free_ext_point_content(cp_ext_point_t *ext_point) { - free(ext_point->name); - free(ext_point->local_id); - free(ext_point->identifier); - free(ext_point->schema_path); -} - -static void free_extension_content(cp_extension_t *extension) { - free(extension->name); - free(extension->local_id); - free(extension->identifier); - free(extension->ext_point_id); -} - -static void free_cfg_element_content(cp_cfg_element_t *ce) { - int i; - - assert(ce != NULL); - free(ce->name); - if (ce->atts != NULL) { - free(ce->atts[0]); - free(ce->atts); - } - free(ce->value); - for (i = 0; i < ce->num_children; i++) { - free_cfg_element_content(ce->children + i); - } - free(ce->children); -} - -CP_HIDDEN void cpi_free_plugin(cp_plugin_info_t *plugin) { - int i; - - assert(plugin != NULL); - free(plugin->name); - free(plugin->summary); - free(plugin->identifier); - free(plugin->version); - free(plugin->provider_name); - free(plugin->plugin_path); - free(plugin->abi_bw_compatibility); - free(plugin->api_bw_compatibility); - free(plugin->req_cpluff_version); - for (i = 0; i < plugin->num_imports; i++) { - free_plugin_import_content(plugin->imports + i); - } - free(plugin->imports); - free(plugin->runtime_lib_name); - free(plugin->runtime_funcs_symbol); - for (i = 0; i < plugin->num_ext_points; i++) { - free_ext_point_content(plugin->ext_points + i); - } - free(plugin->ext_points); - for (i = 0; i < plugin->num_extensions; i++) { - free_extension_content(plugin->extensions + i); - if (plugin->extensions[i].configuration != NULL) { - free_cfg_element_content(plugin->extensions[i].configuration); - free(plugin->extensions[i].configuration); - } - } - free(plugin->extensions); - free(plugin); -} - -/** - * Frees any memory allocated for a registered plug-in. - * - * @param context the plug-in context - * @param plugin the plug-in to be freed - */ -static void free_registered_plugin(cp_context_t *context, cp_plugin_t *plugin) { - assert(context != NULL); - assert(plugin != NULL); - - // Release plug-in information - cpi_release_info(context, plugin->plugin); - - // Release data structures - if (plugin->importing != NULL) { - assert(list_isempty(plugin->importing)); - list_destroy(plugin->importing); - } - assert(plugin->imported == NULL); - - free(plugin); -} - -/** - * Uninstalls a plug-in associated with the specified hash node. - * - * @param context the plug-in context - * @param node the hash node of the plug-in to be uninstalled - */ -static void uninstall_plugin(cp_context_t *context, hnode_t *node) { - cp_plugin_t *plugin; - cpi_plugin_event_t event; - - // Check if already uninstalled - plugin = (cp_plugin_t *) hnode_get(node); - if (plugin->state <= CP_PLUGIN_UNINSTALLED) { - // TODO: Is this possible state? 
- return; - } - - // Make sure the plug-in is not in resolved state - unresolve_plugin(context, plugin); - assert(plugin->state == CP_PLUGIN_INSTALLED); - - // Plug-in uninstalled - event.plugin_id = plugin->plugin->identifier; - event.old_state = plugin->state; - event.new_state = plugin->state = CP_PLUGIN_UNINSTALLED; - cpi_deliver_event(context, &event); - - // Unregister extension objects - unregister_extensions(context, plugin->plugin); - - // Unregister the plug-in - hash_delete_free(context->env->plugins, node); - - // Free the plug-in data structures - free_registered_plugin(context, plugin); -} - -CP_C_API cp_status_t cp_uninstall_plugin(cp_context_t *context, const char *id) { - hnode_t *node; - cp_status_t status = CP_OK; - - CHECK_NOT_NULL(context); - CHECK_NOT_NULL(id); - - // Look up and unload the plug-in - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - node = hash_lookup(context->env->plugins, id); - if (node != NULL) { - uninstall_plugin(context, node); - } else { - cpi_warnf(context, N_("Unknown plug-in %s could not be uninstalled."), id); - status = CP_ERR_UNKNOWN; - } - cpi_unlock_context(context); - - return status; -} - -CP_C_API void cp_uninstall_plugins(cp_context_t *context) { - hscan_t scan; - hnode_t *node; - - CHECK_NOT_NULL(context); - - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - cp_stop_plugins(context); - while (1) { - hash_scan_begin(&scan, context->env->plugins); - if ((node = hash_scan_next(&scan)) != NULL) { - uninstall_plugin(context, node); - } else { - break; - } - } - cpi_unlock_context(context); -} diff --git a/xbmc/lib/cpluff-0.1.3/libcpluff/ploader.c b/xbmc/lib/cpluff-0.1.3/libcpluff/ploader.c deleted file mode 100644 index d6ea187d99..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libcpluff/ploader.c +++ /dev/null @@ -1,1190 +0,0 @@ -/*------------------------------------------------------------------------- - * C-Pluff, a plug-in framework for C - * Copyright 2007 Johannes Lehtinen - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- *-----------------------------------------------------------------------*/ - -/** @file - * Plug-in descriptor loader - */ - -#include <stdio.h> -#include <stdlib.h> -#include <string.h> -#include <assert.h> -#include <stdarg.h> -#include <sys/stat.h> -#include <fcntl.h> -#include <expat.h> -#include "cpluff.h" -#include "defines.h" -#include "util.h" -#include "internal.h" - -// Use XMLCALL if available -#ifdef XMLCALL -#define CP_XMLCALL XMLCALL -#else -#define CP_XMLCALL -#endif - - -/* ------------------------------------------------------------------------ - * Constants - * ----------------------------------------------------------------------*/ - -/// XML parser buffer size (in bytes) -#define CP_XML_PARSER_BUFFER_SIZE 4096 - -/// Initial configuration element value size -#define CP_CFG_ELEMENT_VALUE_INITSIZE 64 - -/// Plugin descriptor name -#define CP_PLUGIN_DESCRIPTOR "addon.xml" - - -/* ------------------------------------------------------------------------ - * Internal data types - * ----------------------------------------------------------------------*/ - -typedef struct ploader_context_t ploader_context_t; - -/// Parser states -typedef enum parser_state_t { - PARSER_BEGIN, - PARSER_PLUGIN, - PARSER_REQUIRES, - PARSER_EXTENSION, - PARSER_END, - PARSER_UNKNOWN, - PARSER_ERROR -} parser_state_t; - -/// Plug-in loader context -struct ploader_context_t { - - /// The plug-in context, or NULL if none - cp_context_t *context; - - /// The XML parser being used - XML_Parser parser; - - /// The file being parsed - char *file; - - /// The plug-in being constructed - cp_plugin_info_t *plugin; - - /// The configuration element being constructed - cp_cfg_element_t *configuration; - - /// The current parser state - parser_state_t state; - - /// The saved parser state (used in PARSER_UNKNOWN) - parser_state_t saved_state; - - /** - * The current parser depth (used in PARSER_UNKNOWN and PARSER_EXTENSION) - */ - unsigned int depth; - - /// The number of skipped configuration elements - unsigned int skippedCEs; - - /// Size of allocated imports table - size_t imports_size; - - /// Size of allocated extension points table - size_t ext_points_size; - - /// Size of allocated extensions table - size_t extensions_size; - - /// Buffer for a value being read - char *value; - - /// Size of allocated value field - size_t value_size; - - /// Current length of value string - size_t value_length; - - /// The number of parsing errors that have occurred - unsigned int error_count; - - /// The number of resource errors that have occurred - unsigned int resource_error_count; -}; - - -/* ------------------------------------------------------------------------ - * Function definitions - * ----------------------------------------------------------------------*/ - -/** - * Reports a descriptor error. Does not set the parser to error state but - * increments the error count, unless this is merely a warning. - * - * @param context the parsing context - * @param warn whether this is only a warning - * @param error_msg the error message - * @param ... parameters for the error message - */ -static void descriptor_errorf(ploader_context_t *plcontext, int warn, - const char *error_msg, ...) 
{ - va_list ap; - char message[128]; - - va_start(ap, error_msg); - vsnprintf(message, sizeof(message), error_msg, ap); - va_end(ap); - message[127] = '\0'; - if (warn) { - cpi_warnf(plcontext->context, - N_("Suspicious plug-in descriptor content in %s, line %d, column %d (%s)."), - plcontext->file, - XML_GetCurrentLineNumber(plcontext->parser), - XML_GetCurrentColumnNumber(plcontext->parser) + 1, - message); - } else { - cpi_errorf(plcontext->context, - N_("Invalid plug-in descriptor content in %s, line %d, column %d (%s)."), - plcontext->file, - XML_GetCurrentLineNumber(plcontext->parser), - XML_GetCurrentColumnNumber(plcontext->parser) + 1, - message); - } - if (!warn) { - plcontext->error_count++; - } -} - -/** - * Reports insufficient system resources while parsing and increments the - * resource error count. - * - * @param context the parsing context - */ -static void resource_error(ploader_context_t *plcontext) { - if (plcontext->resource_error_count == 0) { - cpi_errorf(plcontext->context, - N_("Insufficient system resources to parse plug-in descriptor content in %s, line %d, column %d."), - plcontext->file, - XML_GetCurrentLineNumber(plcontext->parser), - XML_GetCurrentColumnNumber(plcontext->parser) + 1); - } - plcontext->resource_error_count++; -} - -/** - * Returns whether the specified NULL-terminated list of strings includes - * the specified string. - * - * @param list the NULL-terminated list of strings, or NULL if none - * @param str the string - * @param step the stepping (1 to check every string or 2 to check every - * other string) - * @return pointer to the location of the string or NULL if not found - */ -static const XML_Char * const *contains_str(const XML_Char * const *list, - const XML_Char *str, int step) { - if (list != NULL) { - while (*list != NULL) { - if (!strcmp(*list, str)) { - return list; - } - list += step; - } - } - return NULL; -} - -/** - * Checks that an element has non-empty values for required attributes. - * Increments the error count for each missing attribute. - * - * @param context the parsing context - * @param elem the element being checked - * @param atts the attribute list for the element - * @param req_atts the required attributes (NULL terminated list, or NULL) - * @return whether the required attributes are present - */ -static int check_req_attributes(ploader_context_t *plcontext, - const XML_Char *elem, const XML_Char * const *atts, - const XML_Char * const *req_atts) { - const XML_Char * const *a; - int error = 0; - - // Check that required attributes have non-empty values - for (a = req_atts; a != NULL && *a != NULL; a++) { - const XML_Char * const *av; - - if ((av = contains_str(atts, *a, 2)) != NULL) { - if ((*(av + 1))[0] == '\0') { - descriptor_errorf(plcontext, 0, - _("required attribute %s for element %s has an empty value"), - *a, elem); - error = 1; - } - } else { - descriptor_errorf(plcontext, 0, - _("required attribute %s missing for element %s"), - *a, elem); - error = 1; - } - } - - return !error; -} - -/** - * Checks that an element has non-empty values for required attributes and - * warns if there are unknown attributes. Increments the error count for - * each missing required attribute. 
- * - * @param context the parsing context - * @param elem the element being checked - * @param atts the attribute list for the element - * @param req_atts the required attributes (NULL terminated list, or NULL) - * @param opt_atts the optional attributes (NULL terminated list, or NULL) - * @return whether the required attributes are present - */ -static int check_attributes(ploader_context_t *plcontext, - const XML_Char *elem, const XML_Char * const *atts, - const XML_Char * const *req_atts, const XML_Char * const *opt_atts) { - int error = 0; - - // Check required attributes - error = !check_req_attributes(plcontext, elem, atts, req_atts); - - // Warn if there are unknown attributes - for (; *atts != NULL; atts += 2) { - if (contains_str(req_atts, *atts, 1) == NULL - && contains_str(opt_atts, *atts, 1) == NULL) { - descriptor_errorf(plcontext, 1, - _("ignoring unknown attribute %s for element %s"), - *atts, elem); - } - } - - return !error; -} - -/** - * Allocates memory using malloc. Reports a resource error if there is not - * enough available memory. - * - * @param context the parsing context - * @param size the number of bytes to allocate - * @return pointer to the allocated memory, or NULL if memory allocation failed - */ -static void *parser_malloc(ploader_context_t *plcontext, size_t size) { - void *ptr; - - if ((ptr = malloc(size)) == NULL) { - resource_error(plcontext); - } - return ptr; -} - -/** - * Makes a copy of the specified string. The memory is allocated using malloc. - * Reports a resource error if there is not enough available memory. - * - * @param context the parsing context - * @param src the source string to be copied - * @return copy of the string, or NULL if memory allocation failed - */ -static char *parser_strdup(ploader_context_t *plcontext, const char *src) { - char *dup; - - if ((dup = strdup(src)) == NULL) { - resource_error(plcontext); - } - return dup; -} - -/** - * Concatenates the specified strings into a new string. The memory for the concatenated - * string is allocated using malloc. Reports a resource error if there is not - * enough available memory. - * - * @param context the parsing context - * @param ... the strings to be concatenated, terminated by NULL - * @return the concatenated string, or NULL if memory allocation failed - */ -static char *parser_strscat(ploader_context_t *plcontext, ...) { - va_list ap; - const char *str; - char *dst; - size_t len; - - // Calculate the length of the concatenated string - va_start(ap, plcontext); - len = 0; - while ((str = va_arg(ap, const char *)) != NULL) { - len += strlen(str); - } - va_end(ap); - - // Allocate space for the concatenated string - if ((dst = parser_malloc(plcontext, sizeof(char) * (len + 1))) == NULL) { - return NULL; - } - - // Copy the strings - len = 0; - va_start(ap, plcontext); - while ((str = va_arg(ap, const char *)) != NULL) { - strcpy(dst + len, str); - len += strlen(str); - } - va_end(ap); - dst[len] = '\0'; - return dst; -} - -/** - * Puts the parser to a state in which it skips an unknown element. - * Warns error handlers about the unknown element. - * - * @param context the parsing context - * @param elem the element name - */ -static void unexpected_element(ploader_context_t *plcontext, const XML_Char *elem) { - plcontext->saved_state = plcontext->state; - plcontext->state = PARSER_UNKNOWN; - plcontext->depth = 0; - descriptor_errorf(plcontext, 1, _("ignoring unexpected element %s and its contents"), elem); -} - -/** - * Creates a copy of the specified attributes. 
Reports failed memory - * allocation. - * - * @param context the parser context - * @param src the source attributes to be copied - * @param num pointer to the location where number of attributes is stored, - * or NULL for none - * @return the duplicated attribute array, or NULL if empty or failed - */ -static char **parser_attsdup(ploader_context_t *plcontext, const XML_Char * const *src, - unsigned int *num_atts) { - char **atts = NULL, *attr_data = NULL; - unsigned int i; - unsigned int num; - size_t attr_size; - - // Calculate the number of attributes and the amount of space required - for (num = 0, attr_size = 0; src[num] != NULL; num++) { - attr_size += strlen(src[num]) + 1; - } - assert((num & 1) == 0); - - // Allocate necessary memory and copy attribute data - if (num > 0) { - if ((atts = parser_malloc(plcontext, num * sizeof(char *))) != NULL) { - if ((attr_data = parser_malloc(plcontext, attr_size * sizeof(char))) != NULL) { - size_t offset; - - for (i = 0, offset = 0; i < num; i++) { - strcpy(attr_data + offset, src[i]); - atts[i] = attr_data + offset; - offset += strlen(src[i]) + 1; - } - } - } - } - - // If successful then return duplicates, otherwise free any allocations - if (num == 0 || (atts != NULL && attr_data != NULL)) { - if (num_atts != NULL) { - *num_atts = num / 2; - } - return atts; - } else { - free(attr_data); - free(atts); - return NULL; - } -} - -/** - * Initializes a configuration element. Reports an error if memory allocation fails. - * - * @param context the parser context - * @param ce the configuration element to be initialized - * @param name the element name - * @param atts the element attributes - * @param parent the parent element - */ -static void init_cfg_element(ploader_context_t *plcontext, cp_cfg_element_t *ce, - const XML_Char *name, const XML_Char * const *atts, cp_cfg_element_t *parent) { - - // Initialize the configuration element - memset(ce, 0, sizeof(cp_cfg_element_t)); - ce->name = parser_strdup(plcontext, name); - ce->atts = parser_attsdup(plcontext, atts, &(ce->num_atts)); - ce->value = NULL; - plcontext->value = NULL; - plcontext->value_size = 0; - plcontext->value_length = 0; - ce->parent = parent; - ce->children = NULL; -} - -/** - * Processes the character data while parsing. - * - * @param userData the parsing context - * @param str the string data - * @param len the string length - */ -static void CP_XMLCALL character_data_handler( - void *userData, const XML_Char *str, int len) { - ploader_context_t *plcontext = userData; - - // Ignore leading whitespace - if (plcontext->value == NULL) { - int i; - - for (i = 0; i < len; i++) { - if (str[i] != ' ' && str[i] != '\n' && str[i] != '\r' && str[i] != '\t') { - break; - } - } - str += i; - len -= i; - if (len == 0) { - return; - } - } - - // Allocate more memory for the character data if needed - if (plcontext->value_length + len >= plcontext->value_size) { - size_t ns; - char *nv; - - ns = plcontext->value_size; - while (plcontext->value_length + len >= ns) { - if (ns == 0) { - ns = CP_CFG_ELEMENT_VALUE_INITSIZE; - } else { - ns = 2 * ns; - } - } - if ((nv = realloc(plcontext->value, ns * sizeof(char))) != NULL) { - plcontext->value = nv; - plcontext->value_size = ns; - } else { - resource_error(plcontext); - return; - } - } - - // Copy character data - strncpy(plcontext->value + plcontext->value_length, str, len * sizeof(char)); - plcontext->value_length += len; -} - -/** - * Processes the start of element events while parsing. 
- * - * @param userData the parsing context - * @param name the element name - * @param atts the element attributes - */ -static void CP_XMLCALL start_element_handler( - void *userData, const XML_Char *name, const XML_Char **atts) { - static const XML_Char * const req_plugin_atts[] = { "id", "summary", "version", NULL }; - static const XML_Char * const opt_plugin_atts[] = { "name", "provider-name", NULL }; - static const XML_Char * const req_bwcompatibility_atts[] = { NULL }; - static const XML_Char * const opt_bwcompatibility_atts[] = { "abi", "api", NULL }; - static const XML_Char * const req_cpluff_atts[] = { "version", NULL }; - static const XML_Char * const opt_cpluff_atts[] = { NULL }; - static const XML_Char * const req_import_atts[] = { "addon", NULL }; - static const XML_Char * const opt_import_atts[] = { "version", "optional", NULL }; - static const XML_Char * const req_runtime_atts[] = { "library", NULL }; - static const XML_Char * const opt_runtime_atts[] = { "funcs", NULL }; - static const XML_Char * const req_ext_point_atts[] = { "id", NULL }; - static const XML_Char * const opt_ext_point_atts[] = { "name", "schema", NULL }; - static const XML_Char * const req_extension_atts[] = { "point", NULL }; - //static const XML_Char * const opt_extension_atts[] = { "id", "name", NULL }; - ploader_context_t *plcontext = userData; - unsigned int i; - - // Process element start - switch (plcontext->state) { - - case PARSER_BEGIN: - if (!strcmp(name, "addon")) { - plcontext->state = PARSER_PLUGIN; - if (!check_attributes(plcontext, name, atts, - req_plugin_atts, opt_plugin_atts)) { - break; - } - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "name")) { - plcontext->plugin->name - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "id")) { - plcontext->plugin->identifier - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "summary")) { - plcontext->plugin->summary - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "version")) { - plcontext->plugin->version - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "provider-name")) { - plcontext->plugin->provider_name - = parser_strdup(plcontext, atts[i+1]); - } - } - } else { - unexpected_element(plcontext, name); - } - break; - - case PARSER_PLUGIN: - if (!strcmp(name, "backwards-compatibility")) { - if (check_attributes(plcontext, name, atts, - req_bwcompatibility_atts, opt_bwcompatibility_atts)) { - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "abi")) { - plcontext->plugin->abi_bw_compatibility = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "api")) { - plcontext->plugin->api_bw_compatibility = parser_strdup(plcontext, atts[i+1]); - } - } - } - } else if (!strcmp(name, "requires")) { - plcontext->state = PARSER_REQUIRES; - } else if (!strcmp(name, "runtime")) { - if (check_attributes(plcontext, name, atts, - req_runtime_atts, opt_runtime_atts)) { - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "library")) { - plcontext->plugin->runtime_lib_name - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "funcs")) { - plcontext->plugin->runtime_funcs_symbol - = parser_strdup(plcontext, atts[i+1]); - } - } - } - } else if (!strcmp(name, "extension-point")) { - if (check_attributes(plcontext, name, atts, - req_ext_point_atts, opt_ext_point_atts)) { - cp_ext_point_t *ext_point; - - // Allocate space for extension points, if necessary - if (plcontext->plugin->num_ext_points == 
plcontext->ext_points_size) { - cp_ext_point_t *nep; - size_t ns; - - if (plcontext->ext_points_size == 0) { - ns = 4; - } else { - ns = plcontext->ext_points_size * 2; - } - if ((nep = realloc(plcontext->plugin->ext_points, - ns * sizeof(cp_ext_point_t))) == NULL) { - resource_error(plcontext); - break; - } - plcontext->plugin->ext_points = nep; - plcontext->ext_points_size = ns; - } - - // Parse extension point specification - ext_point = plcontext->plugin->ext_points - + plcontext->plugin->num_ext_points; - memset(ext_point, 0, sizeof(cp_ext_point_t)); - ext_point->plugin = plcontext->plugin; - ext_point->name = NULL; - ext_point->local_id = NULL; - ext_point->identifier = NULL; - ext_point->schema_path = NULL; - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "name")) { - ext_point->name - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "id")) { - ext_point->local_id - = parser_strdup(plcontext, atts[i+1]); - ext_point->identifier - = parser_strscat(plcontext, - plcontext->plugin->identifier, ".", atts[i+1], NULL); - } else if (!strcmp(atts[i], "schema")) { - ext_point->schema_path - = parser_strdup(plcontext, atts[i+1]); - } - } - plcontext->plugin->num_ext_points++; - - } - } else if (!(strcmp(name, "extension"))) { - plcontext->state = PARSER_EXTENSION; - plcontext->depth = 0; - if (check_req_attributes( - plcontext, name, atts, req_extension_atts)) { - cp_extension_t *extension; - - // Allocate space for extensions, if necessary - if (plcontext->plugin->num_extensions == plcontext->extensions_size) { - cp_extension_t *ne; - size_t ns; - - if (plcontext->extensions_size == 0) { - ns = 16; - } else { - ns = plcontext->extensions_size * 2; - } - if ((ne = realloc(plcontext->plugin->extensions, - ns * sizeof(cp_extension_t))) == NULL) { - resource_error(plcontext); - break; - } - plcontext->plugin->extensions = ne; - plcontext->extensions_size = ns; - } - - // Parse extension attributes - extension = plcontext->plugin->extensions - + plcontext->plugin->num_extensions; - memset(extension, 0, sizeof(cp_extension_t)); - extension->plugin = plcontext->plugin; - extension->name = NULL; - extension->local_id = NULL; - extension->identifier = NULL; - extension->ext_point_id = NULL; - extension->configuration = NULL; - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "point")) { - extension->ext_point_id - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "id")) { - extension->local_id - = parser_strdup(plcontext, atts[i+1]); - extension->identifier - = parser_strscat(plcontext, - plcontext->plugin->identifier, ".", atts[i+1], NULL); - } else if (!strcmp(atts[i], "name")) { - extension->name - = parser_strdup(plcontext, atts[i+1]); - } - } - plcontext->plugin->num_extensions++; - - // Initialize configuration parsing - if ((extension->configuration = plcontext->configuration - = parser_malloc(plcontext, sizeof(cp_cfg_element_t))) != NULL) { - init_cfg_element(plcontext, plcontext->configuration, name, atts, NULL); - } - XML_SetCharacterDataHandler(plcontext->parser, character_data_handler); - } - } else { - unexpected_element(plcontext, name); - } - break; - - case PARSER_REQUIRES: - if (!strcmp(name, "c-pluff")) { - if (check_attributes(plcontext, name, atts, - req_cpluff_atts, opt_cpluff_atts)) { - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "version")) { - plcontext->plugin->req_cpluff_version = parser_strdup(plcontext, atts[i+1]); - } - } - } - } else if (!strcmp(name, "import")) { - if 
(check_attributes(plcontext, name, atts, - req_import_atts, opt_import_atts)) { - cp_plugin_import_t *import = NULL; - - // Allocate space for imports, if necessary - if (plcontext->plugin->num_imports == plcontext->imports_size) { - cp_plugin_import_t *ni; - size_t ns; - - if (plcontext->imports_size == 0) { - ns = 16; - } else { - ns = plcontext->imports_size * 2; - } - if ((ni = realloc(plcontext->plugin->imports, - ns * sizeof(cp_plugin_import_t))) == NULL) { - resource_error(plcontext); - break; - } - plcontext->plugin->imports = ni; - plcontext->imports_size = ns; - } - - // Parse import specification - import = plcontext->plugin->imports - + plcontext->plugin->num_imports; - memset(import, 0, sizeof(cp_plugin_import_t)); - import->plugin_id = NULL; - import->version = NULL; - for (i = 0; atts[i] != NULL; i += 2) { - if (!strcmp(atts[i], "addon")) { - import->plugin_id - = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "version")) { - import->version = parser_strdup(plcontext, atts[i+1]); - } else if (!strcmp(atts[i], "optional")) { - if (!strcmp(atts[i+1], "true") - || !strcmp(atts[i+1], "1")) { - import->optional = 1; - } else if (strcmp(atts[i+1], "false") - && strcmp(atts[i+1], "0")) { - descriptor_errorf(plcontext, 0, _("unknown boolean value: %s"), atts[i+1]); - } - } - } - plcontext->plugin->num_imports++; - } - } else { - unexpected_element(plcontext, name); - } - break; - - case PARSER_EXTENSION: - plcontext->depth++; - if (plcontext->configuration != NULL && plcontext->skippedCEs == 0) { - cp_cfg_element_t *ce; - - // Allocate more space for children, if necessary - if (plcontext->configuration->num_children == plcontext->configuration->index) { - cp_cfg_element_t *nce; - size_t ns; - - if (plcontext->configuration->index == 0) { - ns = 16; - } else { - ns = plcontext->configuration->index * 2; - } - if ((nce = realloc(plcontext->configuration->children, - ns * sizeof(cp_cfg_element_t))) == NULL) { - plcontext->skippedCEs++; - resource_error(plcontext); - break; - } - plcontext->configuration->children = nce; - plcontext->configuration->index = ns; - } - - // Save possible value - if (plcontext->value != NULL) { - plcontext->value[plcontext->value_length] = '\0'; - plcontext->configuration->value = plcontext->value; - } - - ce = plcontext->configuration->children + plcontext->configuration->num_children; - init_cfg_element(plcontext, ce, name, atts, plcontext->configuration); - plcontext->configuration->num_children++; - plcontext->configuration = ce; - } - break; - - case PARSER_UNKNOWN: - plcontext->depth++; - break; - default: - unexpected_element(plcontext, name); - break; - } -} - -/** - * Processes the end of element events while parsing. 
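Taken together, the attribute tables and state cases in the start_element_handler removed above spell out the addon.xml grammar this loader accepted: a root addon element (required id, summary and version; optional name and provider-name) that may contain backwards-compatibility, a requires block with c-pluff and import children, runtime, extension-point, and extension, whose free-form children are captured as a cp_cfg_element_t tree. A hypothetical descriptor sketched from those tables; every identifier, file name and value below is invented for illustration:

<?xml version="1.0" encoding="UTF-8"?>
<addon id="org.example.addon" version="1.0.0" summary="Example add-on"
       name="Example" provider-name="Example Team">
  <backwards-compatibility abi="1.0.0"/>
  <requires>
    <c-pluff version="0.1"/>
    <import addon="org.example.library" version="1.0.0" optional="true"/>
  </requires>
  <runtime library="example" funcs="example_funcs"/>
  <extension-point id="hooks" name="Example hooks" schema="hooks.xsd"/>
  <extension point="org.example.library.hooks" id="myhook" name="My hook">
    <setting key="value">free-form configuration</setting>
  </extension>
</addon>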
- * - * @param context the parsing context - * @param name the element name - */ -static void CP_XMLCALL end_element_handler( - void *userData, const XML_Char *name) { - ploader_context_t *plcontext = userData; - - // Process element end - switch (plcontext->state) { - - case PARSER_PLUGIN: - if (!strcmp(name, "addon")) { - - // Readjust memory allocated for extension points, if necessary - if (plcontext->ext_points_size != plcontext->plugin->num_ext_points) { - cp_ext_point_t *nep; - - if ((nep = realloc(plcontext->plugin->ext_points, - plcontext->plugin->num_ext_points * - sizeof(cp_ext_point_t))) != NULL - || plcontext->plugin->num_ext_points == 0) { - plcontext->plugin->ext_points = nep; - plcontext->ext_points_size = plcontext->plugin->num_ext_points; - } - } - - // Readjust memory allocated for extensions, if necessary - if (plcontext->extensions_size != plcontext->plugin->num_extensions) { - cp_extension_t *ne; - - if ((ne = realloc(plcontext->plugin->extensions, - plcontext->plugin->num_extensions * - sizeof(cp_extension_t))) != NULL - || plcontext->plugin->num_extensions == 0) { - plcontext->plugin->extensions = ne; - plcontext->extensions_size = plcontext->plugin->num_extensions; - } - } - - plcontext->state = PARSER_END; - } - break; - - case PARSER_REQUIRES: - if (!strcmp(name, "requires")) { - - // Readjust memory allocated for imports, if necessary - if (plcontext->imports_size != plcontext->plugin->num_imports) { - cp_plugin_import_t *ni; - - if ((ni = realloc(plcontext->plugin->imports, - plcontext->plugin->num_imports * - sizeof(cp_plugin_import_t))) != NULL - || plcontext->plugin->num_imports == 0) { - plcontext->plugin->imports = ni; - plcontext->imports_size = plcontext->plugin->num_imports; - } - } - - plcontext->state = PARSER_PLUGIN; - } - break; - - case PARSER_UNKNOWN: - if (plcontext->depth-- == 0) { - plcontext->state = plcontext->saved_state; - } - break; - - case PARSER_EXTENSION: - if (plcontext->skippedCEs > 0) { - plcontext->skippedCEs--; - } else if (plcontext->configuration != NULL) { - - // Readjust memory allocated for children, if necessary - if (plcontext->configuration->index != plcontext->configuration->num_children) { - cp_cfg_element_t *nce; - - if ((nce = realloc(plcontext->configuration->children, - plcontext->configuration->num_children * - sizeof(cp_cfg_element_t))) != NULL - || plcontext->configuration->num_children == 0) { - plcontext->configuration->children = nce; - } - } - - if (plcontext->configuration->parent != NULL) { - plcontext->configuration->index = plcontext->configuration->parent->num_children - 1; - } else { - plcontext->configuration->index = 0; - } - if (plcontext->value != NULL) { - char *v = plcontext->value; - int i; - - // Ignore trailing whitespace - for (i = plcontext->value_length - 1; i >= 0; i--) { - if (v[i] != ' ' && v[i] != '\n' && v[i] != '\r' && v[i] != '\t') { - break; - } - } - if (i < 0) { - free(plcontext->value); - plcontext->value = NULL; - plcontext->value_length = 0; - plcontext->value_size = 0; - } else { - plcontext->value_length = i + 1; - } - } - if (plcontext->value != NULL) { - - // Readjust memory allocated for value, if necessary - if (plcontext->value_size > plcontext->value_length + 1) { - char *nv; - - if ((nv = realloc(plcontext->value, (plcontext->value_length + 1) * sizeof(char))) != NULL) { - plcontext->value = nv; - } - } - - plcontext->value[plcontext->value_length] = '\0'; - plcontext->configuration->value = plcontext->value; - plcontext->value = NULL; - plcontext->value_size = 0; - 
plcontext->value_length = 0; - } - plcontext->configuration = plcontext->configuration->parent; - - // Restore possible value - if (plcontext->configuration != NULL - && plcontext->configuration->value != NULL) { - plcontext->value = plcontext->configuration->value; - plcontext->value_length = strlen(plcontext->value); - plcontext->value_size = CP_CFG_ELEMENT_VALUE_INITSIZE; - while (plcontext->value_size < plcontext->value_length + 1) { - plcontext->value_size *= 2; - } - } - - } - if (plcontext->depth-- == 0) { - assert(!strcmp(name, "extension")); - plcontext->state = PARSER_PLUGIN; - XML_SetCharacterDataHandler(plcontext->parser, NULL); - } - break; - - default: - descriptor_errorf(plcontext, 0, _("unexpected closing tag for %s"), - name); - return; - } -} - -static void dealloc_plugin_info(cp_context_t *ctx, cp_plugin_info_t *plugin) { - cpi_free_plugin(plugin); -} - -CP_C_API cp_plugin_info_t * cp_load_plugin_descriptor(cp_context_t *context, const char *path, cp_status_t *error) { - char *file = NULL; - cp_status_t status = CP_OK; - FILE *fh = NULL; - XML_Parser parser = NULL; - ploader_context_t *plcontext = NULL; - cp_plugin_info_t *plugin = NULL; - - CHECK_NOT_NULL(context); - CHECK_NOT_NULL(path); - cpi_lock_context(context); - cpi_check_invocation(context, CPI_CF_ANY, __func__); - do { - int path_len; - - // Construct the file name for the plug-in descriptor - path_len = strlen(path); - if (path_len == 0) { - status = CP_ERR_IO; - break; - } - if (path[path_len - 1] == CP_FNAMESEP_CHAR) { - path_len--; - } - file = malloc((path_len + strlen(CP_PLUGIN_DESCRIPTOR) + 2) * sizeof(char)); - if (file == NULL) { - status = CP_ERR_RESOURCE; - break; - } - strcpy(file, path); - file[path_len] = CP_FNAMESEP_CHAR; - strcpy(file + path_len + 1, CP_PLUGIN_DESCRIPTOR); - - // Open the file - if ((fh = fopen(file, "rb")) == NULL) { - status = CP_ERR_IO; - break; - } - - // Initialize the XML parsing - parser = XML_ParserCreate(NULL); - if (parser == NULL) { - status = CP_ERR_RESOURCE; - break; - } - XML_SetElementHandler(parser, - start_element_handler, - end_element_handler); - - // Initialize the parsing context - if ((plcontext = malloc(sizeof(ploader_context_t))) == NULL) { - status = CP_ERR_RESOURCE; - break; - } - memset(plcontext, 0, sizeof(ploader_context_t)); - if ((plcontext->plugin = malloc(sizeof(cp_plugin_info_t))) == NULL) { - status = CP_ERR_RESOURCE; - break; - } - plcontext->context = context; - plcontext->configuration = NULL; - plcontext->value = NULL; - plcontext->parser = parser; - plcontext->file = file; - plcontext->state = PARSER_BEGIN; - memset(plcontext->plugin, 0, sizeof(cp_plugin_info_t)); - plcontext->plugin->name = NULL; - plcontext->plugin->summary = NULL; - plcontext->plugin->identifier = NULL; - plcontext->plugin->version = NULL; - plcontext->plugin->provider_name = NULL; - plcontext->plugin->abi_bw_compatibility = NULL; - plcontext->plugin->api_bw_compatibility = NULL; - plcontext->plugin->plugin_path = NULL; - plcontext->plugin->req_cpluff_version = NULL; - plcontext->plugin->imports = NULL; - plcontext->plugin->runtime_lib_name = NULL; - plcontext->plugin->runtime_funcs_symbol = NULL; - plcontext->plugin->ext_points = NULL; - plcontext->plugin->extensions = NULL; - XML_SetUserData(parser, plcontext); - - // Parse the plug-in descriptor - while (1) { - int bytes_read; - void *xml_buffer; - int i; - - // Get buffer from Expat - if ((xml_buffer = XML_GetBuffer(parser, CP_XML_PARSER_BUFFER_SIZE)) - == NULL) { - status = CP_ERR_RESOURCE; - break; - } - - // Read 
data into buffer - bytes_read = fread(xml_buffer, 1, CP_XML_PARSER_BUFFER_SIZE, fh); - if (ferror(fh)) { - status = CP_ERR_IO; - break; - } - - // Parse the data - if (!(i = XML_ParseBuffer(parser, bytes_read, bytes_read == 0)) - && context != NULL) { - cpi_lock_context(context); - cpi_errorf(context, - N_("XML parsing error in %s, line %d, column %d (%s)."), - file, - XML_GetErrorLineNumber(parser), - XML_GetErrorColumnNumber(parser) + 1, - XML_ErrorString(XML_GetErrorCode(parser))); - cpi_unlock_context(context); - } - if (!i || plcontext->state == PARSER_ERROR) { - status = CP_ERR_MALFORMED; - break; - } - - if (bytes_read == 0) { - break; - } - } - if (status == CP_OK) { - if (plcontext->state != PARSER_END || plcontext->error_count > 0) { - status = CP_ERR_MALFORMED; - } - if (plcontext->resource_error_count > 0) { - status = CP_ERR_RESOURCE; - } - } - if (status != CP_OK) { - break; - } - - // Initialize the plug-in path - *(file + path_len) = '\0'; - plcontext->plugin->plugin_path = file; - file = NULL; - - // Increase plug-in usage count - if ((status = cpi_register_info(context, plcontext->plugin, (void (*)(cp_context_t *, void *)) dealloc_plugin_info)) != CP_OK) { - break; - } - - } while (0); - - // Report possible errors - if (status != CP_OK) { - switch (status) { - case CP_ERR_MALFORMED: - cpi_errorf(context, - N_("Plug-in descriptor in %s is invalid."), path); - break; - case CP_ERR_IO: - cpi_errorf(context, - N_("An I/O error occurred while loading a plug-in descriptor from %s."), path); - break; - case CP_ERR_RESOURCE: - cpi_errorf(context, - N_("Insufficient system resources to load a plug-in descriptor from %s."), path); - break; - default: - cpi_errorf(context, - N_("Failed to load a plug-in descriptor from %s."), path); - break; - } - } - cpi_unlock_context(context); - - // Release persistently allocated data on failure - if (status != CP_OK) { - if (file != NULL) { - free(file); - file = NULL; - } - if (plcontext != NULL && plcontext->plugin != NULL) { - cpi_free_plugin(plcontext->plugin); - plcontext->plugin = NULL; - } - } - - // Otherwise copy the plug-in pointer - else { - plugin = plcontext->plugin; - } - - // Release data allocated for parsing - if (parser != NULL) { - XML_ParserFree(parser); - } - if (fh != NULL) { - fclose(fh); - } - if (plcontext != NULL) { - if (plcontext->value != NULL) { - free(plcontext->value); - } - free(plcontext); - plcontext = NULL; - } - - // Return error code - if (error != NULL) { - *error = status; - } - - return plugin; -} diff --git a/xbmc/lib/cpluff-0.1.3/libtool b/xbmc/lib/cpluff-0.1.3/libtool deleted file mode 100755 index 9188b37e6d..0000000000 --- a/xbmc/lib/cpluff-0.1.3/libtool +++ /dev/null @@ -1,8906 +0,0 @@ -#! /bin/bash - -# libtool - Provide generalized library-building support services. -# Generated automatically by config.status (cpluff) 0.1.3 -# Libtool was configured on host studio: -# NOTE: Changes made to this file will be lost: look at ltmain.sh. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -# 2006, 2007, 2008 Free Software Foundation, Inc. -# Written by Gordon Matzigkeit, 1996 -# -# This file is part of GNU Libtool. -# -# GNU Libtool is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 2 of -# the License, or (at your option) any later version. 
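cp_load_plugin_descriptor, removed above, is the entry point that drives this Expat-based parser: it appends addon.xml (CP_PLUGIN_DESCRIPTOR) to the supplied directory, parses it in CP_XML_PARSER_BUFFER_SIZE chunks, and returns a reference-counted cp_plugin_info_t or NULL together with a status code. A caller-side sketch in C; the directory path is invented, ctx is an already-created context, and the cleanup call cp_release_info is assumed from the public C-Pluff header rather than from the code shown in this diff:

#include <stdio.h>
#include "cpluff.h"

/* Illustrative sketch: the path below is hypothetical and ctx is an
 * existing plug-in context. */
static void inspect_descriptor(cp_context_t *ctx)
{
    cp_status_t status;
    cp_plugin_info_t *info;

    /* Looks for <dir>/addon.xml and parses it with Expat. */
    info = cp_load_plugin_descriptor(ctx, "/path/to/addons/org.example.addon",
                                     &status);
    if (info == NULL) {
        fprintf(stderr, "descriptor load failed: %d\n", (int) status);
        return;
    }

    printf("loaded %s version %s\n", info->identifier, info->version);

    /* Release the reference taken by the loader (public C-Pluff API,
     * assumed here). */
    cp_release_info(ctx, info);
}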
-# -# As a special exception to the GNU General Public License, -# if you distribute this file as part of a program or library that -# is built using GNU Libtool, you may include this file under the -# same distribution terms that you use for the rest of that program. -# -# GNU Libtool is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with GNU Libtool; see the file COPYING. If not, a copy -# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -# obtained by writing to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - -# The names of the tagged configurations supported by this script. -available_tags="" - -# ### BEGIN LIBTOOL CONFIG - -# Assembler program. -AS=as - -# DLL creation program. -DLLTOOL=dlltool - -# Object dumper program. -OBJDUMP=objdump - -# Which release of libtool.m4 was used? -macro_version=2.2.6 -macro_revision=1.3012 - -# Whether or not to build shared libraries. -build_libtool_libs=yes - -# Whether or not to build static libraries. -build_old_libs=yes - -# What type of objects to build. -pic_mode=default - -# Whether or not to optimize for fast installation. -fast_install=yes - -# The host system. -host_alias= -host=x86_64-unknown-linux-gnu -host_os=linux-gnu - -# The build system. -build_alias= -build=x86_64-unknown-linux-gnu -build_os=linux-gnu - -# A sed program that does not truncate output. -SED="/bin/sed" - -# Sed that helps us avoid accidentally triggering echo(1) options like -n. -Xsed="$SED -e 1s/^X//" - -# A grep program that handles long lines. -GREP="/bin/grep" - -# An ERE matcher. -EGREP="/bin/grep -E" - -# A literal string matcher. -FGREP="/bin/grep -F" - -# A BSD- or MS-compatible name lister. -NM="/usr/bin/nm -B" - -# Whether we need soft or hard links. -LN_S="ln -s" - -# What is the maximum length of a command? -max_cmd_len=1572864 - -# Object file suffix (normally "o"). -objext=o - -# Executable file suffix (normally ""). -exeext= - -# whether the shell understands "unset". -lt_unset=unset - -# turn spaces into newlines. -SP2NL="tr \\040 \\012" - -# turn newlines into spaces. -NL2SP="tr \\015\\012 \\040\\040" - -# How to create reloadable object files. -reload_flag=" -r" -reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs" - -# Method to check whether dependent libraries are shared objects. -deplibs_check_method="pass_all" - -# Command to use when deplibs_check_method == "file_magic". -file_magic_cmd="\$MAGIC_CMD" - -# The archiver. -AR="ar" -AR_FLAGS="cru" - -# A symbol stripping program. -STRIP="strip" - -# Commands used to install an old-style archive. -RANLIB="ranlib" -old_postinstall_cmds="chmod 644 \$oldlib~\$RANLIB \$oldlib" -old_postuninstall_cmds="" - -# A C compiler. -LTCC="gcc" - -# LTCC compiler flags. -LTCFLAGS="-g -O2" - -# Take the output of nm and produce a listing of raw symbols and C names. -global_symbol_pipe="sed -n -e 's/^.*[ ]\\([ABCDGIRSTW][ABCDGIRSTW]*\\)[ ][ ]*\\([_A-Za-z][_A-Za-z0-9]*\\)\$/\\1 \\2 \\2/p'" - -# Transform the output of nm in a proper C declaration. -global_symbol_to_cdecl="sed -n -e 's/^T .* \\(.*\\)\$/extern int \\1();/p' -e 's/^[ABCDGIRSTW]* .* \\(.*\\)\$/extern char \\1;/p'" - -# Transform the output of nm in a C name address pair. 
-global_symbol_to_c_name_address="sed -n -e 's/^: \\([^ ]*\\) \$/ {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/ {\"\\2\", (void *) \\&\\2},/p'" - -# Transform the output of nm in a C name address pair when lib prefix is needed. -global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \\([^ ]*\\) \$/ {\\\"\\1\\\", (void *) 0},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\(lib[^ ]*\\)\$/ {\"\\2\", (void *) \\&\\2},/p' -e 's/^[ABCDGIRSTW]* \\([^ ]*\\) \\([^ ]*\\)\$/ {\"lib\\2\", (void *) \\&\\2},/p'" - -# The name of the directory that contains temporary libtool files. -objdir=.libs - -# Shell to use when invoking shell scripts. -SHELL="/bin/bash" - -# An echo program that does not interpret backslashes. -ECHO="echo" - -# Used to examine libraries when file_magic_cmd begins with "file". -MAGIC_CMD=file - -# Must we lock files when doing compilation? -need_locks="no" - -# Tool to manipulate archived DWARF debug symbol files on Mac OS X. -DSYMUTIL="" - -# Tool to change global to local symbols on Mac OS X. -NMEDIT="" - -# Tool to manipulate fat objects and archives on Mac OS X. -LIPO="" - -# ldd/readelf like tool for Mach-O binaries on Mac OS X. -OTOOL="" - -# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. -OTOOL64="" - -# Old archive suffix (normally "a"). -libext=a - -# Shared library suffix (normally ".so"). -shrext_cmds=".so" - -# The commands to extract the exported symbol list from a shared archive. -extract_expsyms_cmds="" - -# Variables whose values should be saved in libtool wrapper scripts and -# restored at link time. -variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - -# Do we need the "lib" prefix for modules? -need_lib_prefix=no - -# Do we need a version for libraries? -need_version=no - -# Library versioning type. -version_type=linux - -# Shared library runtime path variable. -runpath_var=LD_RUN_PATH - -# Shared library path variable. -shlibpath_var=LD_LIBRARY_PATH - -# Is shlibpath searched before the hard-coded library search path? -shlibpath_overrides_runpath=no - -# Format of library name prefix. -libname_spec="lib\$name" - -# List of archive names. First name is the real one, the rest are links. -# The last name is the one that the linker finds with -lNAME -library_names_spec="\${libname}\${release}\${shared_ext}\$versuffix \${libname}\${release}\${shared_ext}\$major \$libname\${shared_ext}" - -# The coded name of the library, if different from the real name. -soname_spec="\${libname}\${release}\${shared_ext}\$major" - -# Command to use after installation of a shared archive. -postinstall_cmds="" - -# Command to use after uninstallation of a shared archive. -postuninstall_cmds="" - -# Commands used to finish a libtool library installation in a directory. -finish_cmds="PATH=\\\"\\\$PATH:/sbin\\\" ldconfig -n \$libdir" - -# As "finish_cmds", except a single script fragment to be evaled but -# not shown. -finish_eval="" - -# Whether we should hardcode library paths into libraries. -hardcode_into_libs=yes - -# Compile-time system search path for libraries. -sys_lib_search_path_spec="/usr/lib/gcc/x86_64-linux-gnu/4.4.1 /usr/lib /lib /usr/lib/x86_64-linux-gnu" - -# Run-time system search path for libraries. -sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib32/alsa-lib /usr/lib/alsa-lib /usr/local/lib /lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu " - -# Whether dlopen is supported. -dlopen_support=yes - -# Whether dlopen of programs is supported. 
-dlopen_self=yes - -# Whether dlopen of statically linked programs is supported. -dlopen_self_static=no - -# Commands to strip libraries. -old_striplib="strip --strip-debug" -striplib="strip --strip-unneeded" - - -# The linker used to build libraries. -LD="/usr/bin/ld -m elf_x86_64" - -# Commands used to build an old-style archive. -old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$oldlib" - -# A language specific compiler. -CC="gcc" - -# Is the compiler the GNU compiler? -with_gcc=yes - -# Compiler flag to turn off builtin functions. -no_builtin_flag=" -fno-builtin" - -# How to pass a linker flag through the compiler. -wl="-Wl," - -# Additional compiler flags for building library objects. -pic_flag=" -fPIC -DPIC" - -# Compiler flag to prevent dynamic linking. -link_static_flag="-static" - -# Does compiler simultaneously support -c and -o options? -compiler_c_o="yes" - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=no - -# Whether or not to disallow shared libs when runtime libs are static. -allow_libtool_libs_with_static_runtimes=no - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec="\${wl}--export-dynamic" - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec="\${wl}--whole-archive\$convenience \${wl}--no-whole-archive" - -# Whether the compiler copes with passing no objects directly. -compiler_needs_object="no" - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds="" - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds="" - -# Commands used to build a shared archive. -archive_cmds="\$CC -shared \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname -o \$lib" -archive_expsym_cmds="echo \\\"{ global:\\\" > \$output_objdir/\$libname.ver~ - cat \$export_symbols | sed -e \\\"s/\\\\(.*\\\\)/\\\\1;/\\\" >> \$output_objdir/\$libname.ver~ - echo \\\"local: *; };\\\" >> \$output_objdir/\$libname.ver~ - \$CC -shared \$libobjs \$deplibs \$compiler_flags \${wl}-soname \$wl\$soname \${wl}-version-script \${wl}\$output_objdir/\$libname.ver -o \$lib" - -# Commands used to build a loadable module if different from building -# a shared archive. -module_cmds="" -module_expsym_cmds="" - -# Whether we are building with GNU ld or not. -with_gnu_ld="yes" - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag="" - -# Flag that enforces no undefined symbols. -no_undefined_flag="" - -# Flag to hardcode $libdir into a binary during linking. -# This must work even if $libdir does not exist -hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir" - -# If ld is used when linking, flag to hardcode $libdir into a binary -# during linking. This must work even if $libdir does not exist. -hardcode_libdir_flag_spec_ld="" - -# Whether we need a single "-rpath" flag with a separated argument. -hardcode_libdir_separator="" - -# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes -# DIR into the resulting binary. -hardcode_direct=no - -# Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes -# DIR into the resulting binary and the resulting library dependency is -# "absolute",i.e impossible to change by setting ${shlibpath_var} if the -# library is relocated. -hardcode_direct_absolute=no - -# Set to "yes" if using the -LDIR flag during linking hardcodes DIR -# into the resulting binary. 
-hardcode_minus_L=no - -# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR -# into the resulting binary. -hardcode_shlibpath_var=unsupported - -# Set to "yes" if building a shared library automatically hardcodes DIR -# into the library and all subsequent libraries and executables linked -# against it. -hardcode_automatic=no - -# Set to yes if linker adds runtime paths of dependent libraries -# to runtime path list. -inherit_rpath=no - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=no - -# Fix the shell variable $srcfile for the compiler. -fix_srcfile_path="" - -# Set to "yes" if exported symbols are required. -always_export_symbols=no - -# The commands to list exported symbols. -export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols" - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*" - -# Symbols that must always be exported. -include_expsyms="" - -# Commands necessary for linking programs (against libraries) with templates. -prelink_cmds="" - -# Specify filename containing input files. -file_list_spec="" - -# How to hardcode a shared library path into an executable. -hardcode_action=immediate - -# ### END LIBTOOL CONFIG - -# Generated from ltmain.m4sh. - -# ltmain.sh (GNU libtool) 2.2.6 -# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996 - -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc. -# This is free software; see the source for copying conditions. There is NO -# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - -# GNU Libtool is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# As a special exception to the GNU General Public License, -# if you distribute this file as part of a program or library that -# is built using GNU Libtool, you may include this file under the -# same distribution terms that you use for the rest of that program. -# -# GNU Libtool is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with GNU Libtool; see the file COPYING. If not, a copy -# can be downloaded from http://www.gnu.org/licenses/gpl.html, -# or obtained by writing to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - -# Usage: $progname [OPTION]... [MODE-ARG]... -# -# Provide generalized library-building support services. 
-# -# --config show all configuration variables -# --debug enable verbose shell tracing -# -n, --dry-run display commands without modifying any files -# --features display basic configuration information and exit -# --mode=MODE use operation mode MODE -# --preserve-dup-deps don't remove duplicate dependency libraries -# --quiet, --silent don't print informational messages -# --tag=TAG use configuration variables from tag TAG -# -v, --verbose print informational messages (default) -# --version print version information -# -h, --help print short or long help message -# -# MODE must be one of the following: -# -# clean remove files from the build directory -# compile compile a source file into a libtool object -# execute automatically set library path, then run a program -# finish complete the installation of libtool libraries -# install install libraries or executables -# link create a library or an executable -# uninstall remove libraries from an installed directory -# -# MODE-ARGS vary depending on the MODE. -# Try `$progname --help --mode=MODE' for a more detailed description of MODE. -# -# When reporting a bug, please describe a test case to reproduce it and -# include the following information: -# -# host-triplet: $host -# shell: $SHELL -# compiler: $LTCC -# compiler flags: $LTCFLAGS -# linker: $LD (gnu? $with_gnu_ld) -# $progname: (GNU libtool) 2.2.6 Debian-2.2.6a-4 -# automake: $automake_version -# autoconf: $autoconf_version -# -# Report bugs to <bug-libtool@gnu.org>. - -PROGRAM=ltmain.sh -PACKAGE=libtool -VERSION="2.2.6 Debian-2.2.6a-4" -TIMESTAMP="" -package_revision=1.3012 - -# Be Bourne compatible -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac -fi -BIN_SH=xpg4; export BIN_SH # for Tru64 -DUALCASE=1; export DUALCASE # for MKS sh - -# NLS nuisances: We save the old values to restore during execute mode. -# Only set LANG and LC_ALL to C if already set. -# These must not be set unconditionally because not all systems understand -# e.g. LANG=C (notably SCO). -lt_user_locale= -lt_safe_locale= -for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES -do - eval "if test \"\${$lt_var+set}\" = set; then - save_$lt_var=\$$lt_var - $lt_var=C - export $lt_var - lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" - lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" - fi" -done - -$lt_unset CDPATH - - - - - -: ${CP="cp -f"} -: ${ECHO="echo"} -: ${EGREP="/bin/grep -E"} -: ${FGREP="/bin/grep -F"} -: ${GREP="/bin/grep"} -: ${LN_S="ln -s"} -: ${MAKE="make"} -: ${MKDIR="mkdir"} -: ${MV="mv -f"} -: ${RM="rm -f"} -: ${SED="/bin/sed"} -: ${SHELL="${CONFIG_SHELL-/bin/sh}"} -: ${Xsed="$SED -e 1s/^X//"} - -# Global variables: -EXIT_SUCCESS=0 -EXIT_FAILURE=1 -EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. -EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. - -exit_status=$EXIT_SUCCESS - -# Make sure IFS has a sensible default -lt_nl=' -' -IFS=" $lt_nl" - -dirname="s,/[^/]*$,," -basename="s,^.*/,," - -# func_dirname_and_basename file append nondir_replacement -# perform func_basename and func_dirname in a single function -# call: -# dirname: Compute the dirname of FILE. If nonempty, -# add APPEND to the result, otherwise set result -# to NONDIR_REPLACEMENT. 
-# value returned in "$func_dirname_result" -# basename: Compute filename of FILE. -# value retuned in "$func_basename_result" -# Implementation must be kept synchronized with func_dirname -# and func_basename. For efficiency, we do not delegate to -# those functions but instead duplicate the functionality here. -func_dirname_and_basename () -{ - # Extract subdirectory from the argument. - func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` - if test "X$func_dirname_result" = "X${1}"; then - func_dirname_result="${3}" - else - func_dirname_result="$func_dirname_result${2}" - fi - func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` -} - -# Generated shell functions inserted here. - -# func_dirname file append nondir_replacement -# Compute the dirname of FILE. If nonempty, add APPEND to the result, -# otherwise set result to NONDIR_REPLACEMENT. -func_dirname () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac -} - -# func_basename file -func_basename () -{ - func_basename_result="${1##*/}" -} - -# func_dirname_and_basename file append nondir_replacement -# perform func_basename and func_dirname in a single function -# call: -# dirname: Compute the dirname of FILE. If nonempty, -# add APPEND to the result, otherwise set result -# to NONDIR_REPLACEMENT. -# value returned in "$func_dirname_result" -# basename: Compute filename of FILE. -# value retuned in "$func_basename_result" -# Implementation must be kept synchronized with func_dirname -# and func_basename. For efficiency, we do not delegate to -# those functions but instead duplicate the functionality here. -func_dirname_and_basename () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac - func_basename_result="${1##*/}" -} - -# func_stripname prefix suffix name -# strip PREFIX and SUFFIX off of NAME. -# PREFIX and SUFFIX must not contain globbing or regex special -# characters, hashes, percent signs, but SUFFIX may contain a leading -# dot (in which case that matches only a dot). -func_stripname () -{ - # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are - # positional parameters, so assign one to ordinary parameter first. - func_stripname_result=${3} - func_stripname_result=${func_stripname_result#"${1}"} - func_stripname_result=${func_stripname_result%"${2}"} -} - -# func_opt_split -func_opt_split () -{ - func_opt_split_opt=${1%%=*} - func_opt_split_arg=${1#*=} -} - -# func_lo2o object -func_lo2o () -{ - case ${1} in - *.lo) func_lo2o_result=${1%.lo}.${objext} ;; - *) func_lo2o_result=${1} ;; - esac -} - -# func_xform libobj-or-source -func_xform () -{ - func_xform_result=${1%.*}.lo -} - -# func_arith arithmetic-term... -func_arith () -{ - func_arith_result=$(( $* )) -} - -# func_len string -# STRING may not start with a hyphen. -func_len () -{ - func_len_result=${#1} -} - - -# func_append var value -# Append VALUE to the end of shell variable VAR. -func_append () -{ - eval "$1+=\$2" -} -# Generated shell functions inserted here. - -# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh -# is ksh but when the shell is invoked as "sh" and the current value of -# the _XPG environment variable is not equal to 1 (one), the special -# positional parameter $0, within a function call, is the name of the -# function. 
-progpath="$0" - -# The name of this program: -# In the unlikely event $progname began with a '-', it would play havoc with -# func_echo (imagine progname=-n), so we prepend ./ in that case: -func_dirname_and_basename "$progpath" -progname=$func_basename_result -case $progname in - -*) progname=./$progname ;; -esac - -# Make sure we have an absolute path for reexecution: -case $progpath in - [\\/]*|[A-Za-z]:\\*) ;; - *[\\/]*) - progdir=$func_dirname_result - progdir=`cd "$progdir" && pwd` - progpath="$progdir/$progname" - ;; - *) - save_IFS="$IFS" - IFS=: - for progdir in $PATH; do - IFS="$save_IFS" - test -x "$progdir/$progname" && break - done - IFS="$save_IFS" - test -n "$progdir" || progdir=`pwd` - progpath="$progdir/$progname" - ;; -esac - -# Sed substitution that helps us do robust quoting. It backslashifies -# metacharacters that are still active within double-quoted strings. -Xsed="${SED}"' -e 1s/^X//' -sed_quote_subst='s/\([`"$\\]\)/\\\1/g' - -# Same as above, but do not quote variable references. -double_quote_subst='s/\(["`\\]\)/\\\1/g' - -# Re-`\' parameter expansions in output of double_quote_subst that were -# `\'-ed in input to the same. If an odd number of `\' preceded a '$' -# in input to double_quote_subst, that '$' was protected from expansion. -# Since each input `\' is now two `\'s, look for any number of runs of -# four `\'s followed by two `\'s and then a '$'. `\' that '$'. -bs='\\' -bs2='\\\\' -bs4='\\\\\\\\' -dollar='\$' -sed_double_backslash="\ - s/$bs4/&\\ -/g - s/^$bs2$dollar/$bs&/ - s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g - s/\n//g" - -# Standard options: -opt_dry_run=false -opt_help=false -opt_quiet=false -opt_verbose=false -opt_warning=: - -# func_echo arg... -# Echo program name prefixed message, along with the current mode -# name if it has been set yet. -func_echo () -{ - $ECHO "$progname${mode+: }$mode: $*" -} - -# func_verbose arg... -# Echo program name prefixed message in verbose mode only. -func_verbose () -{ - $opt_verbose && func_echo ${1+"$@"} - - # A bug in bash halts the script if the last line of a function - # fails when set -e is in force, so we need another command to - # work around that: - : -} - -# func_error arg... -# Echo program name prefixed message to standard error. -func_error () -{ - $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 -} - -# func_warning arg... -# Echo program name prefixed warning message to standard error. -func_warning () -{ - $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 - - # bash bug again: - : -} - -# func_fatal_error arg... -# Echo program name prefixed message to standard error, and exit. -func_fatal_error () -{ - func_error ${1+"$@"} - exit $EXIT_FAILURE -} - -# func_fatal_help arg... -# Echo program name prefixed message to standard error, followed by -# a help hint, and exit. -func_fatal_help () -{ - func_error ${1+"$@"} - func_fatal_error "$help" -} -help="Try \`$progname --help' for more information." ## default - - -# func_grep expression filename -# Check whether EXPRESSION matches any line of FILENAME, without output. -func_grep () -{ - $GREP "$1" "$2" >/dev/null 2>&1 -} - - -# func_mkdir_p directory-path -# Make sure the entire path to DIRECTORY-PATH is available. 
-func_mkdir_p () -{ - my_directory_path="$1" - my_dir_list= - - if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then - - # Protect directory names starting with `-' - case $my_directory_path in - -*) my_directory_path="./$my_directory_path" ;; - esac - - # While some portion of DIR does not yet exist... - while test ! -d "$my_directory_path"; do - # ...make a list in topmost first order. Use a colon delimited - # list incase some portion of path contains whitespace. - my_dir_list="$my_directory_path:$my_dir_list" - - # If the last portion added has no slash in it, the list is done - case $my_directory_path in */*) ;; *) break ;; esac - - # ...otherwise throw away the child directory and loop - my_directory_path=`$ECHO "X$my_directory_path" | $Xsed -e "$dirname"` - done - my_dir_list=`$ECHO "X$my_dir_list" | $Xsed -e 's,:*$,,'` - - save_mkdir_p_IFS="$IFS"; IFS=':' - for my_dir in $my_dir_list; do - IFS="$save_mkdir_p_IFS" - # mkdir can fail with a `File exist' error if two processes - # try to create one of the directories concurrently. Don't - # stop in that case! - $MKDIR "$my_dir" 2>/dev/null || : - done - IFS="$save_mkdir_p_IFS" - - # Bail out if we (or some other process) failed to create a directory. - test -d "$my_directory_path" || \ - func_fatal_error "Failed to create \`$1'" - fi -} - - -# func_mktempdir [string] -# Make a temporary directory that won't clash with other running -# libtool processes, and avoids race conditions if possible. If -# given, STRING is the basename for that directory. -func_mktempdir () -{ - my_template="${TMPDIR-/tmp}/${1-$progname}" - - if test "$opt_dry_run" = ":"; then - # Return a directory name, but don't create it in dry-run mode - my_tmpdir="${my_template}-$$" - else - - # If mktemp works, use that first and foremost - my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` - - if test ! -d "$my_tmpdir"; then - # Failing that, at least try and use $RANDOM to avoid a race - my_tmpdir="${my_template}-${RANDOM-0}$$" - - save_mktempdir_umask=`umask` - umask 0077 - $MKDIR "$my_tmpdir" - umask $save_mktempdir_umask - fi - - # If we're not in dry-run mode, bomb out on failure - test -d "$my_tmpdir" || \ - func_fatal_error "cannot create temporary directory \`$my_tmpdir'" - fi - - $ECHO "X$my_tmpdir" | $Xsed -} - - -# func_quote_for_eval arg -# Aesthetically quote ARG to be evaled later. -# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT -# is double-quoted, suitable for a subsequent eval, whereas -# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters -# which are still active within double quotes backslashified. -func_quote_for_eval () -{ - case $1 in - *[\\\`\"\$]*) - func_quote_for_eval_unquoted_result=`$ECHO "X$1" | $Xsed -e "$sed_quote_subst"` ;; - *) - func_quote_for_eval_unquoted_result="$1" ;; - esac - - case $func_quote_for_eval_unquoted_result in - # Double-quote args containing shell metacharacters to delay - # word splitting, command substitution and and variable - # expansion for a subsequent eval. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" - ;; - *) - func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" - esac -} - - -# func_quote_for_expand arg -# Aesthetically quote ARG to be evaled later; same as above, -# but do not quote variable references. 
-func_quote_for_expand () -{ - case $1 in - *[\\\`\"]*) - my_arg=`$ECHO "X$1" | $Xsed \ - -e "$double_quote_subst" -e "$sed_double_backslash"` ;; - *) - my_arg="$1" ;; - esac - - case $my_arg in - # Double-quote args containing shell metacharacters to delay - # word splitting and command substitution for a subsequent eval. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - my_arg="\"$my_arg\"" - ;; - esac - - func_quote_for_expand_result="$my_arg" -} - - -# func_show_eval cmd [fail_exp] -# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is -# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP -# is given, then evaluate it. -func_show_eval () -{ - my_cmd="$1" - my_fail_exp="${2-:}" - - ${opt_silent-false} || { - func_quote_for_expand "$my_cmd" - eval "func_echo $func_quote_for_expand_result" - } - - if ${opt_dry_run-false}; then :; else - eval "$my_cmd" - my_status=$? - if test "$my_status" -eq 0; then :; else - eval "(exit $my_status); $my_fail_exp" - fi - fi -} - - -# func_show_eval_locale cmd [fail_exp] -# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is -# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP -# is given, then evaluate it. Use the saved locale for evaluation. -func_show_eval_locale () -{ - my_cmd="$1" - my_fail_exp="${2-:}" - - ${opt_silent-false} || { - func_quote_for_expand "$my_cmd" - eval "func_echo $func_quote_for_expand_result" - } - - if ${opt_dry_run-false}; then :; else - eval "$lt_user_locale - $my_cmd" - my_status=$? - eval "$lt_safe_locale" - if test "$my_status" -eq 0; then :; else - eval "(exit $my_status); $my_fail_exp" - fi - fi -} - - - - - -# func_version -# Echo version message to standard output and exit. -func_version () -{ - $SED -n '/^# '$PROGRAM' (GNU /,/# warranty; / { - s/^# // - s/^# *$// - s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ - p - }' < "$progpath" - exit $? -} - -# func_usage -# Echo short help message to standard output and exit. -func_usage () -{ - $SED -n '/^# Usage:/,/# -h/ { - s/^# // - s/^# *$// - s/\$progname/'$progname'/ - p - }' < "$progpath" - $ECHO - $ECHO "run \`$progname --help | more' for full usage" - exit $? -} - -# func_help -# Echo long help message to standard output and exit. -func_help () -{ - $SED -n '/^# Usage:/,/# Report bugs to/ { - s/^# // - s/^# *$// - s*\$progname*'$progname'* - s*\$host*'"$host"'* - s*\$SHELL*'"$SHELL"'* - s*\$LTCC*'"$LTCC"'* - s*\$LTCFLAGS*'"$LTCFLAGS"'* - s*\$LD*'"$LD"'* - s/\$with_gnu_ld/'"$with_gnu_ld"'/ - s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ - s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ - p - }' < "$progpath" - exit $? -} - -# func_missing_arg argname -# Echo program name prefixed message to standard error and set global -# exit_cmd. -func_missing_arg () -{ - func_error "missing argument for $1" - exit_cmd=exit -} - -exit_cmd=: - - - - - -# Check that we have a working $ECHO. -if test "X$1" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift -elif test "X$1" = X--fallback-echo; then - # Avoid inline document here, it may be left over - : -elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t'; then - # Yippee, $ECHO works! - : -else - # Restart under the correct shell, and then maybe $ECHO will work. 
- exec $SHELL "$progpath" --no-reexec ${1+"$@"} -fi - -if test "X$1" = X--fallback-echo; then - # used as fallback echo - shift - cat <<EOF -$* -EOF - exit $EXIT_SUCCESS -fi - -magic="%%%MAGIC variable%%%" -magic_exe="%%%MAGIC EXE variable%%%" - -# Global variables. -# $mode is unset -nonopt= -execute_dlfiles= -preserve_args= -lo2o="s/\\.lo\$/.${objext}/" -o2lo="s/\\.${objext}\$/.lo/" -extracted_archives= -extracted_serial=0 - -opt_dry_run=false -opt_duplicate_deps=false -opt_silent=false -opt_debug=: - -# If this variable is set in any of the actions, the command in it -# will be execed at the end. This prevents here-documents from being -# left over by shells. -exec_cmd= - -# func_fatal_configuration arg... -# Echo program name prefixed message to standard error, followed by -# a configuration failure hint, and exit. -func_fatal_configuration () -{ - func_error ${1+"$@"} - func_error "See the $PACKAGE documentation for more information." - func_fatal_error "Fatal configuration error." -} - - -# func_config -# Display the configuration for all the tags in this script. -func_config () -{ - re_begincf='^# ### BEGIN LIBTOOL' - re_endcf='^# ### END LIBTOOL' - - # Default configuration. - $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath" - - # Now print the configurations for the tags. - for tagname in $taglist; do - $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath" - done - - exit $? -} - -# func_features -# Display the features supported by this script. -func_features () -{ - $ECHO "host: $host" - if test "$build_libtool_libs" = yes; then - $ECHO "enable shared libraries" - else - $ECHO "disable shared libraries" - fi - if test "$build_old_libs" = yes; then - $ECHO "enable static libraries" - else - $ECHO "disable static libraries" - fi - - exit $? -} - -# func_enable_tag tagname -# Verify that TAGNAME is valid, and either flag an error and exit, or -# enable the TAGNAME tag. We also add TAGNAME to the global $taglist -# variable here. -func_enable_tag () -{ - # Global variable: - tagname="$1" - - re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" - re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" - sed_extractcf="/$re_begincf/,/$re_endcf/p" - - # Validate tagname. - case $tagname in - *[!-_A-Za-z0-9,/]*) - func_fatal_error "invalid tag name: $tagname" - ;; - esac - - # Don't test for the "default" C tag, as we know it's - # there but not specially marked. - case $tagname in - CC) ;; - *) - if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then - taglist="$taglist $tagname" - - # Evaluate the configuration. Be careful to quote the path - # and the sed script, to avoid splitting on whitespace, but - # also don't use non-portable quotes within backquotes within - # quotes we have to do it in 2 steps: - extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` - eval "$extractedcf" - else - func_error "ignoring unknown tag $tagname" - fi - ;; - esac -} - -# Parse options once, thoroughly. This comes as soon as possible in -# the script to make things like `libtool --version' happen quickly. 
-{ - - # Shorthand for --mode=foo, only valid as the first argument - case $1 in - clean|clea|cle|cl) - shift; set dummy --mode clean ${1+"$@"}; shift - ;; - compile|compil|compi|comp|com|co|c) - shift; set dummy --mode compile ${1+"$@"}; shift - ;; - execute|execut|execu|exec|exe|ex|e) - shift; set dummy --mode execute ${1+"$@"}; shift - ;; - finish|finis|fini|fin|fi|f) - shift; set dummy --mode finish ${1+"$@"}; shift - ;; - install|instal|insta|inst|ins|in|i) - shift; set dummy --mode install ${1+"$@"}; shift - ;; - link|lin|li|l) - shift; set dummy --mode link ${1+"$@"}; shift - ;; - uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) - shift; set dummy --mode uninstall ${1+"$@"}; shift - ;; - esac - - # Parse non-mode specific arguments: - while test "$#" -gt 0; do - opt="$1" - shift - - case $opt in - --config) func_config ;; - - --debug) preserve_args="$preserve_args $opt" - func_echo "enabling shell trace mode" - opt_debug='set -x' - $opt_debug - ;; - - -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break - execute_dlfiles="$execute_dlfiles $1" - shift - ;; - - --dry-run | -n) opt_dry_run=: ;; - --features) func_features ;; - --finish) mode="finish" ;; - - --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break - case $1 in - # Valid mode arguments: - clean) ;; - compile) ;; - execute) ;; - finish) ;; - install) ;; - link) ;; - relink) ;; - uninstall) ;; - - # Catch anything else as an error - *) func_error "invalid argument for $opt" - exit_cmd=exit - break - ;; - esac - - mode="$1" - shift - ;; - - --preserve-dup-deps) - opt_duplicate_deps=: ;; - - --quiet|--silent) preserve_args="$preserve_args $opt" - opt_silent=: - ;; - - --verbose| -v) preserve_args="$preserve_args $opt" - opt_silent=false - ;; - - --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break - preserve_args="$preserve_args $opt $1" - func_enable_tag "$1" # tagname is set here - shift - ;; - - # Separate optargs to long options: - -dlopen=*|--mode=*|--tag=*) - func_opt_split "$opt" - set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} - shift - ;; - - -\?|-h) func_usage ;; - --help) opt_help=: ;; - --version) func_version ;; - - -*) func_fatal_help "unrecognized option \`$opt'" ;; - - *) nonopt="$opt" - break - ;; - esac - done - - - case $host in - *cygwin* | *mingw* | *pw32* | *cegcc*) - # don't eliminate duplications in $postdeps and $predeps - opt_duplicate_compiler_generated_deps=: - ;; - *) - opt_duplicate_compiler_generated_deps=$opt_duplicate_deps - ;; - esac - - # Having warned about all mis-specified options, bail out if - # anything was wrong. - $exit_cmd $EXIT_FAILURE -} - -# func_check_version_match -# Ensure that we are using m4 macros, and libtool script from the same -# release of libtool. -func_check_version_match () -{ - if test "$package_revision" != "$macro_revision"; then - if test "$VERSION" != "$macro_version"; then - if test -z "$macro_version"; then - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -$progname: definition of this LT_INIT comes from an older release. -$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -$progname: and run autoconf again. -_LT_EOF - else - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. -$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -$progname: and run autoconf again. 
-_LT_EOF - fi - else - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, -$progname: but the definition of this LT_INIT comes from revision $macro_revision. -$progname: You should recreate aclocal.m4 with macros from revision $package_revision -$progname: of $PACKAGE $VERSION and run autoconf again. -_LT_EOF - fi - - exit $EXIT_MISMATCH - fi -} - - -## ----------- ## -## Main. ## -## ----------- ## - -$opt_help || { - # Sanity checks first: - func_check_version_match - - if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then - func_fatal_configuration "not configured to build any kind of library" - fi - - test -z "$mode" && func_fatal_error "error: you must specify a MODE." - - - # Darwin sucks - eval std_shrext=\"$shrext_cmds\" - - - # Only execute mode is allowed to have -dlopen flags. - if test -n "$execute_dlfiles" && test "$mode" != execute; then - func_error "unrecognized option \`-dlopen'" - $ECHO "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Change the help message to a mode-specific one. - generic_help="$help" - help="Try \`$progname --help --mode=$mode' for more information." -} - - -# func_lalib_p file -# True iff FILE is a libtool `.la' library or `.lo' object file. -# This function is only a basic sanity check; it will hardly flush out -# determined imposters. -func_lalib_p () -{ - test -f "$1" && - $SED -e 4q "$1" 2>/dev/null \ - | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 -} - -# func_lalib_unsafe_p file -# True iff FILE is a libtool `.la' library or `.lo' object file. -# This function implements the same check as func_lalib_p without -# resorting to external programs. To this end, it redirects stdin and -# closes it afterwards, without saving the original file descriptor. -# As a safety measure, use it only where a negative result would be -# fatal anyway. Works if `file' does not exist. -func_lalib_unsafe_p () -{ - lalib_p=no - if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then - for lalib_p_l in 1 2 3 4 - do - read lalib_p_line - case "$lalib_p_line" in - \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; - esac - done - exec 0<&5 5<&- - fi - test "$lalib_p" = yes -} - -# func_ltwrapper_script_p file -# True iff FILE is a libtool wrapper script -# This function is only a basic sanity check; it will hardly flush out -# determined imposters. -func_ltwrapper_script_p () -{ - func_lalib_p "$1" -} - -# func_ltwrapper_executable_p file -# True iff FILE is a libtool wrapper executable -# This function is only a basic sanity check; it will hardly flush out -# determined imposters. -func_ltwrapper_executable_p () -{ - func_ltwrapper_exec_suffix= - case $1 in - *.exe) ;; - *) func_ltwrapper_exec_suffix=.exe ;; - esac - $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1 -} - -# func_ltwrapper_scriptname file -# Assumes file is an ltwrapper_executable -# uses $file to determine the appropriate filename for a -# temporary ltwrapper_script. -func_ltwrapper_scriptname () -{ - func_ltwrapper_scriptname_result="" - if func_ltwrapper_executable_p "$1"; then - func_dirname_and_basename "$1" "" "." - func_stripname '' '.exe' "$func_basename_result" - func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" - fi -} - -# func_ltwrapper_p file -# True iff FILE is a libtool wrapper script or wrapper executable -# This function is only a basic sanity check; it will hardly flush out -# determined imposters. 
-func_ltwrapper_p () -{ - func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1" -} - - -# func_execute_cmds commands fail_cmd -# Execute tilde-delimited COMMANDS. -# If FAIL_CMD is given, eval that upon failure. -# FAIL_CMD may read-access the current command in variable CMD! -func_execute_cmds () -{ - $opt_debug - save_ifs=$IFS; IFS='~' - for cmd in $1; do - IFS=$save_ifs - eval cmd=\"$cmd\" - func_show_eval "$cmd" "${2-:}" - done - IFS=$save_ifs -} - - -# func_source file -# Source FILE, adding directory component if necessary. -# Note that it is not necessary on cygwin/mingw to append a dot to -# FILE even if both FILE and FILE.exe exist: automatic-append-.exe -# behavior happens only for exec(3), not for open(2)! Also, sourcing -# `FILE.' does not work on cygwin managed mounts. -func_source () -{ - $opt_debug - case $1 in - */* | *\\*) . "$1" ;; - *) . "./$1" ;; - esac -} - - -# func_infer_tag arg -# Infer tagged configuration to use if any are available and -# if one wasn't chosen via the "--tag" command line option. -# Only attempt this if the compiler in the base compile -# command doesn't match the default compiler. -# arg is usually of the form 'gcc ...' -func_infer_tag () -{ - $opt_debug - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do - func_quote_for_eval "$arg" - CC_quoted="$CC_quoted $func_quote_for_eval_result" - done - case $@ in - # Blanks in the command may have been stripped by the calling shell, - # but not from the CC environment variable when configure was run. - " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*) ;; - # Blanks at the start of $base_compile will cause this to fail - # if we don't check for them as well. - *) - for z in $available_tags; do - if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then - # Evaluate the configuration. - eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. - func_quote_for_eval "$arg" - CC_quoted="$CC_quoted $func_quote_for_eval_result" - done - case "$@ " in - " $CC "* | "$CC "* | " `$ECHO $CC` "* | "`$ECHO $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$ECHO $CC_quoted` "* | "`$ECHO $CC_quoted` "*) - # The compiler in the base compile command matches - # the one in the tagged configuration. - # Assume this is the tagged configuration we want. - tagname=$z - break - ;; - esac - fi - done - # If $tagname still isn't set, then no tagged configuration - # was found and let the user know that the "--tag" command - # line option must be used. - if test -z "$tagname"; then - func_echo "unable to infer tagged configuration" - func_fatal_error "specify a tag with \`--tag'" -# else -# func_verbose "using $tagname tagged configuration" - fi - ;; - esac - fi -} - - - -# func_write_libtool_object output_name pic_name nonpic_name -# Create a libtool object file (analogous to a ".la" file), -# but don't create it if we're doing a dry run. 
-func_write_libtool_object () -{ - write_libobj=${1} - if test "$build_libtool_libs" = yes; then - write_lobj=\'${2}\' - else - write_lobj=none - fi - - if test "$build_old_libs" = yes; then - write_oldobj=\'${3}\' - else - write_oldobj=none - fi - - $opt_dry_run || { - cat >${write_libobj}T <<EOF -# $write_libobj - a libtool object file -# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION -# -# Please DO NOT delete this file! -# It is necessary for linking the library. - -# Name of the PIC object. -pic_object=$write_lobj - -# Name of the non-PIC object -non_pic_object=$write_oldobj - -EOF - $MV "${write_libobj}T" "${write_libobj}" - } -} - -# func_mode_compile arg... -func_mode_compile () -{ - $opt_debug - # Get the compilation command and the source file. - base_compile= - srcfile="$nonopt" # always keep a non-empty value in "srcfile" - suppress_opt=yes - suppress_output= - arg_mode=normal - libobj= - later= - pie_flag= - - for arg - do - case $arg_mode in - arg ) - # do not "continue". Instead, add this to base_compile - lastarg="$arg" - arg_mode=normal - ;; - - target ) - libobj="$arg" - arg_mode=normal - continue - ;; - - normal ) - # Accept any command-line options. - case $arg in - -o) - test -n "$libobj" && \ - func_fatal_error "you cannot specify \`-o' more than once" - arg_mode=target - continue - ;; - - -pie | -fpie | -fPIE) - pie_flag="$pie_flag $arg" - continue - ;; - - -shared | -static | -prefer-pic | -prefer-non-pic) - later="$later $arg" - continue - ;; - - -no-suppress) - suppress_opt=no - continue - ;; - - -Xcompiler) - arg_mode=arg # the next one goes into the "base_compile" arg list - continue # The current "srcfile" will either be retained or - ;; # replaced later. I would guess that would be a bug. - - -Wc,*) - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - lastarg= - save_ifs="$IFS"; IFS=',' - for arg in $args; do - IFS="$save_ifs" - func_quote_for_eval "$arg" - lastarg="$lastarg $func_quote_for_eval_result" - done - IFS="$save_ifs" - func_stripname ' ' '' "$lastarg" - lastarg=$func_stripname_result - - # Add the arguments to base_compile. - base_compile="$base_compile $lastarg" - continue - ;; - - *) - # Accept the current argument as the source file. - # The previous "srcfile" becomes the current argument. - # - lastarg="$srcfile" - srcfile="$arg" - ;; - esac # case $arg - ;; - esac # case $arg_mode - - # Aesthetically quote the previous argument. - func_quote_for_eval "$lastarg" - base_compile="$base_compile $func_quote_for_eval_result" - done # for arg - - case $arg_mode in - arg) - func_fatal_error "you must specify an argument for -Xcompile" - ;; - target) - func_fatal_error "you must specify a target with \`-o'" - ;; - *) - # Get the name of the library object. - test -z "$libobj" && { - func_basename "$srcfile" - libobj="$func_basename_result" - } - ;; - esac - - # Recognize several different file suffixes. - # If the user specifies -o file.o, it is replaced with file.lo - case $libobj in - *.[cCFSifmso] | \ - *.ada | *.adb | *.ads | *.asm | \ - *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \ - *.[fF][09]? 
| *.for | *.java | *.obj | *.sx) - func_xform "$libobj" - libobj=$func_xform_result - ;; - esac - - case $libobj in - *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;; - *) - func_fatal_error "cannot determine name of library object from \`$libobj'" - ;; - esac - - func_infer_tag $base_compile - - for arg in $later; do - case $arg in - -shared) - test "$build_libtool_libs" != yes && \ - func_fatal_configuration "can not build a shared library" - build_old_libs=no - continue - ;; - - -static) - build_libtool_libs=no - build_old_libs=yes - continue - ;; - - -prefer-pic) - pic_mode=yes - continue - ;; - - -prefer-non-pic) - pic_mode=no - continue - ;; - esac - done - - func_quote_for_eval "$libobj" - test "X$libobj" != "X$func_quote_for_eval_result" \ - && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \ - && func_warning "libobj name \`$libobj' may not contain shell special characters." - func_dirname_and_basename "$obj" "/" "" - objname="$func_basename_result" - xdir="$func_dirname_result" - lobj=${xdir}$objdir/$objname - - test -z "$base_compile" && \ - func_fatal_help "you must specify a compilation command" - - # Delete any leftover library objects. - if test "$build_old_libs" = yes; then - removelist="$obj $lobj $libobj ${libobj}T" - else - removelist="$lobj $libobj ${libobj}T" - fi - - # On Cygwin there's no "real" PIC flag so we must build both object types - case $host_os in - cygwin* | mingw* | pw32* | os2* | cegcc*) - pic_mode=default - ;; - esac - if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then - # non-PIC code in shared libraries is not supported - pic_mode=default - fi - - # Calculate the filename of the output object if compiler does - # not support -o with -c - if test "$compiler_c_o" = no; then - output_obj=`$ECHO "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext} - lockfile="$output_obj.lock" - else - output_obj= - need_locks=no - lockfile= - fi - - # Lock this critical section if it is needed - # We use this script file to make the link, it avoids creating a new file - if test "$need_locks" = yes; then - until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do - func_echo "Waiting for $lockfile to be removed" - sleep 2 - done - elif test "$need_locks" = warn; then - if test -f "$lockfile"; then - $ECHO "\ -*** ERROR, $lockfile exists and contains: -`cat $lockfile 2>/dev/null` - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $opt_dry_run || $RM $removelist - exit $EXIT_FAILURE - fi - removelist="$removelist $output_obj" - $ECHO "$srcfile" > "$lockfile" - fi - - $opt_dry_run || $RM $removelist - removelist="$removelist $lockfile" - trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 - - if test -n "$fix_srcfile_path"; then - eval srcfile=\"$fix_srcfile_path\" - fi - func_quote_for_eval "$srcfile" - qsrcfile=$func_quote_for_eval_result - - # Only build a PIC object if we are building libtool libraries. - if test "$build_libtool_libs" = yes; then - # Without this assignment, base_compile gets emptied. 
- fbsd_hideous_sh_bug=$base_compile - - if test "$pic_mode" != no; then - command="$base_compile $qsrcfile $pic_flag" - else - # Don't build PIC code - command="$base_compile $qsrcfile" - fi - - func_mkdir_p "$xdir$objdir" - - if test -z "$output_obj"; then - # Place PIC objects in $objdir - command="$command -o $lobj" - fi - - func_show_eval_locale "$command" \ - 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' - - if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $opt_dry_run || $RM $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed, then go on to compile the next one - if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then - func_show_eval '$MV "$output_obj" "$lobj"' \ - 'error=$?; $opt_dry_run || $RM $removelist; exit $error' - fi - - # Allow error messages only from the first compilation. - if test "$suppress_opt" = yes; then - suppress_output=' >/dev/null 2>&1' - fi - fi - - # Only build a position-dependent object if we build old libraries. - if test "$build_old_libs" = yes; then - if test "$pic_mode" != yes; then - # Don't build PIC code - command="$base_compile $qsrcfile$pie_flag" - else - command="$base_compile $qsrcfile $pic_flag" - fi - if test "$compiler_c_o" = yes; then - command="$command -o $obj" - fi - - # Suppress compiler output if we already did a PIC compilation. - command="$command$suppress_output" - func_show_eval_locale "$command" \ - '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' - - if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $opt_dry_run || $RM $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed - if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then - func_show_eval '$MV "$output_obj" "$obj"' \ - 'error=$?; $opt_dry_run || $RM $removelist; exit $error' - fi - fi - - $opt_dry_run || { - func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" - - # Unlock the critical section if it was locked - if test "$need_locks" != no; then - removelist=$lockfile - $RM "$lockfile" - fi - } - - exit $EXIT_SUCCESS -} - -$opt_help || { -test "$mode" = compile && func_mode_compile ${1+"$@"} -} - -func_mode_help () -{ - # We need to display help for each of the modes. - case $mode in - "") - # Generic help is extracted from the usage comments - # at the start of this file. - func_help - ;; - - clean) - $ECHO \ -"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE... - -Remove files from the build directory. 
- -RM is the name of the program to use to delete files associated with each FILE -(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed -to RM. - -If FILE is a libtool library, object or program, all the files associated -with it are deleted. Otherwise, only FILE itself is deleted using RM." - ;; - - compile) - $ECHO \ -"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE - -Compile a source file into a libtool library object. - -This mode accepts the following additional options: - - -o OUTPUT-FILE set the output file name to OUTPUT-FILE - -no-suppress do not suppress compiler output for multiple passes - -prefer-pic try to building PIC objects only - -prefer-non-pic try to building non-PIC objects only - -shared do not build a \`.o' file suitable for static linking - -static only build a \`.o' file suitable for static linking - -COMPILE-COMMAND is a command to be used in creating a \`standard' object file -from the given SOURCEFILE. - -The output file name is determined by removing the directory component from -SOURCEFILE, then substituting the C source code suffix \`.c' with the -library object suffix, \`.lo'." - ;; - - execute) - $ECHO \ -"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]... - -Automatically set library path, then run a program. - -This mode accepts the following additional options: - - -dlopen FILE add the directory containing FILE to the library path - -This mode sets the library path environment variable according to \`-dlopen' -flags. - -If any of the ARGS are libtool executable wrappers, then they are translated -into their corresponding uninstalled binary, and any of their required library -directories are added to the library path. - -Then, COMMAND is executed, with ARGS as arguments." - ;; - - finish) - $ECHO \ -"Usage: $progname [OPTION]... --mode=finish [LIBDIR]... - -Complete the installation of libtool libraries. - -Each LIBDIR is a directory that contains libtool libraries. - -The commands that this mode executes may require superuser privileges. Use -the \`--dry-run' option if you just want to see what would be executed." - ;; - - install) - $ECHO \ -"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND... - -Install executables or libraries. - -INSTALL-COMMAND is the installation command. The first component should be -either the \`install' or \`cp' program. - -The following components of INSTALL-COMMAND are treated specially: - - -inst-prefix PREFIX-DIR Use PREFIX-DIR as a staging area for installation - -The rest of the components are interpreted as arguments to that command (only -BSD-compatible install options are recognized)." - ;; - - link) - $ECHO \ -"Usage: $progname [OPTION]... --mode=link LINK-COMMAND... - -Link object files or libraries together to form another library, or to -create an executable program. - -LINK-COMMAND is a command using the C compiler that you would use to create -a program from several object files. 
- -The following components of LINK-COMMAND are treated specially: - - -all-static do not do any dynamic linking at all - -avoid-version do not add a version suffix if possible - -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime - -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols - -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) - -export-symbols SYMFILE - try to export only the symbols listed in SYMFILE - -export-symbols-regex REGEX - try to export only the symbols matching REGEX - -LLIBDIR search LIBDIR for required installed libraries - -lNAME OUTPUT-FILE requires the installed library libNAME - -module build a library that can dlopened - -no-fast-install disable the fast-install mode - -no-install link a not-installable executable - -no-undefined declare that a library does not refer to external symbols - -o OUTPUT-FILE create OUTPUT-FILE from the specified objects - -objectlist FILE Use a list of object files found in FILE to specify objects - -precious-files-regex REGEX - don't remove output files matching REGEX - -release RELEASE specify package release information - -rpath LIBDIR the created library will eventually be installed in LIBDIR - -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries - -shared only do dynamic linking of libtool libraries - -shrext SUFFIX override the standard shared library file extension - -static do not do any dynamic linking of uninstalled libtool libraries - -static-libtool-libs - do not do any dynamic linking of libtool libraries - -version-info CURRENT[:REVISION[:AGE]] - specify library version info [each variable defaults to 0] - -weak LIBNAME declare that the target provides the LIBNAME interface - -All other options (arguments beginning with \`-') are ignored. - -Every other argument is treated as a filename. Files ending in \`.la' are -treated as uninstalled libtool libraries, other files are standard or library -object files. - -If the OUTPUT-FILE ends in \`.la', then a libtool library is created, -only library objects (\`.lo' files) may be specified, and \`-rpath' is -required, except when creating a convenience library. - -If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created -using \`ar' and \`ranlib', or on Windows using \`lib'. - -If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file -is created, otherwise an executable program is created." - ;; - - uninstall) - $ECHO \ -"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... - -Remove libraries from an installation directory. - -RM is the name of the program to use to delete files associated with each FILE -(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed -to RM. - -If FILE is a libtool library, all the files associated with it are deleted. -Otherwise, only FILE itself is deleted using RM." - ;; - - *) - func_fatal_help "invalid operation mode \`$mode'" - ;; - esac - - $ECHO - $ECHO "Try \`$progname --help' for more information about other modes." - - exit $? -} - - # Now that we've collected a possible --mode arg, show help if necessary - $opt_help && func_mode_help - - -# func_mode_execute arg... -func_mode_execute () -{ - $opt_debug - # The first argument is the command name. - cmd="$nonopt" - test -z "$cmd" && \ - func_fatal_help "you must specify a COMMAND" - - # Handle -dlopen flags immediately. 
- for file in $execute_dlfiles; do - test -f "$file" \ - || func_fatal_help "\`$file' is not a file" - - dir= - case $file in - *.la) - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ - || func_fatal_help "\`$lib' is not a valid libtool archive" - - # Read the libtool library. - dlname= - library_names= - func_source "$file" - - # Skip this library if it cannot be dlopened. - if test -z "$dlname"; then - # Warn if it was a shared library. - test -n "$library_names" && \ - func_warning "\`$file' was not linked with \`-export-dynamic'" - continue - fi - - func_dirname "$file" "" "." - dir="$func_dirname_result" - - if test -f "$dir/$objdir/$dlname"; then - dir="$dir/$objdir" - else - if test ! -f "$dir/$dlname"; then - func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" - fi - fi - ;; - - *.lo) - # Just add the directory containing the .lo file. - func_dirname "$file" "" "." - dir="$func_dirname_result" - ;; - - *) - func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" - continue - ;; - esac - - # Get the absolute pathname. - absdir=`cd "$dir" && pwd` - test -n "$absdir" && dir="$absdir" - - # Now add the directory to shlibpath_var. - if eval "test -z \"\$$shlibpath_var\""; then - eval "$shlibpath_var=\"\$dir\"" - else - eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" - fi - done - - # This variable tells wrapper scripts just to set shlibpath_var - # rather than running their programs. - libtool_execute_magic="$magic" - - # Check if any of the arguments is a wrapper script. - args= - for file - do - case $file in - -*) ;; - *) - # Do a test to see if this is really a libtool program. - if func_ltwrapper_script_p "$file"; then - func_source "$file" - # Transform arg to wrapped name. - file="$progdir/$program" - elif func_ltwrapper_executable_p "$file"; then - func_ltwrapper_scriptname "$file" - func_source "$func_ltwrapper_scriptname_result" - # Transform arg to wrapped name. - file="$progdir/$program" - fi - ;; - esac - # Quote arguments (to preserve shell metacharacters). - func_quote_for_eval "$file" - args="$args $func_quote_for_eval_result" - done - - if test "X$opt_dry_run" = Xfalse; then - if test -n "$shlibpath_var"; then - # Export the shlibpath_var. - eval "export $shlibpath_var" - fi - - # Restore saved environment variables - for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES - do - eval "if test \"\${save_$lt_var+set}\" = set; then - $lt_var=\$save_$lt_var; export $lt_var - else - $lt_unset $lt_var - fi" - done - - # Now prepare to actually exec the command. - exec_cmd="\$cmd$args" - else - # Display what would be done. - if test -n "$shlibpath_var"; then - eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" - $ECHO "export $shlibpath_var" - fi - $ECHO "$cmd$args" - exit $EXIT_SUCCESS - fi -} - -test "$mode" = execute && func_mode_execute ${1+"$@"} - - -# func_mode_finish arg... -func_mode_finish () -{ - $opt_debug - libdirs="$nonopt" - admincmds= - - if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - for dir - do - libdirs="$libdirs $dir" - done - - for libdir in $libdirs; do - if test -n "$finish_cmds"; then - # Do each command in the finish commands. - func_execute_cmds "$finish_cmds" 'admincmds="$admincmds -'"$cmd"'"' - fi - if test -n "$finish_eval"; then - # Do the single finish_eval. - eval cmds=\"$finish_eval\" - $opt_dry_run || eval "$cmds" || admincmds="$admincmds - $cmds" - fi - done - fi - - # Exit here if they wanted silent mode. 
- $opt_silent && exit $EXIT_SUCCESS - - $ECHO "X----------------------------------------------------------------------" | $Xsed - $ECHO "Libraries have been installed in:" - for libdir in $libdirs; do - $ECHO " $libdir" - done - $ECHO - $ECHO "If you ever happen to want to link against installed libraries" - $ECHO "in a given directory, LIBDIR, you must either use libtool, and" - $ECHO "specify the full pathname of the library, or use the \`-LLIBDIR'" - $ECHO "flag during linking and do at least one of the following:" - if test -n "$shlibpath_var"; then - $ECHO " - add LIBDIR to the \`$shlibpath_var' environment variable" - $ECHO " during execution" - fi - if test -n "$runpath_var"; then - $ECHO " - add LIBDIR to the \`$runpath_var' environment variable" - $ECHO " during linking" - fi - if test -n "$hardcode_libdir_flag_spec"; then - libdir=LIBDIR - eval flag=\"$hardcode_libdir_flag_spec\" - - $ECHO " - use the \`$flag' linker flag" - fi - if test -n "$admincmds"; then - $ECHO " - have your system administrator run these commands:$admincmds" - fi - if test -f /etc/ld.so.conf; then - $ECHO " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" - fi - $ECHO - - $ECHO "See any operating system documentation about shared libraries for" - case $host in - solaris2.[6789]|solaris2.1[0-9]) - $ECHO "more information, such as the ld(1), crle(1) and ld.so(8) manual" - $ECHO "pages." - ;; - *) - $ECHO "more information, such as the ld(1) and ld.so(8) manual pages." - ;; - esac - $ECHO "X----------------------------------------------------------------------" | $Xsed - exit $EXIT_SUCCESS -} - -test "$mode" = finish && func_mode_finish ${1+"$@"} - - -# func_mode_install arg... -func_mode_install () -{ - $opt_debug - # There may be an optional sh(1) argument at the beginning of - # install_prog (especially on Windows NT). - if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || - # Allow the use of GNU shtool's install command. - $ECHO "X$nonopt" | $GREP shtool >/dev/null; then - # Aesthetically quote it. - func_quote_for_eval "$nonopt" - install_prog="$func_quote_for_eval_result " - arg=$1 - shift - else - install_prog= - arg=$nonopt - fi - - # The real first argument should be the name of the installation program. - # Aesthetically quote it. - func_quote_for_eval "$arg" - install_prog="$install_prog$func_quote_for_eval_result" - - # We need to accept at least all the BSD install flags. - dest= - files= - opts= - prev= - install_type= - isdir=no - stripme= - for arg - do - if test -n "$dest"; then - files="$files $dest" - dest=$arg - continue - fi - - case $arg in - -d) isdir=yes ;; - -f) - case " $install_prog " in - *[\\\ /]cp\ *) ;; - *) prev=$arg ;; - esac - ;; - -g | -m | -o) - prev=$arg - ;; - -s) - stripme=" -s" - continue - ;; - -*) - ;; - *) - # If the previous option needed an argument, then skip it. - if test -n "$prev"; then - prev= - else - dest=$arg - continue - fi - ;; - esac - - # Aesthetically quote the argument. - func_quote_for_eval "$arg" - install_prog="$install_prog $func_quote_for_eval_result" - done - - test -z "$install_prog" && \ - func_fatal_help "you must specify an install program" - - test -n "$prev" && \ - func_fatal_help "the \`$prev' option requires an argument" - - if test -z "$files"; then - if test -z "$dest"; then - func_fatal_help "no file or destination specified" - else - func_fatal_help "you must specify a destination" - fi - fi - - # Strip any trailing slash from the destination. 
- func_stripname '' '/' "$dest" - dest=$func_stripname_result - - # Check to see that the destination is a directory. - test -d "$dest" && isdir=yes - if test "$isdir" = yes; then - destdir="$dest" - destname= - else - func_dirname_and_basename "$dest" "" "." - destdir="$func_dirname_result" - destname="$func_basename_result" - - # Not a directory, so check to see that there is only one file specified. - set dummy $files; shift - test "$#" -gt 1 && \ - func_fatal_help "\`$dest' is not a directory" - fi - case $destdir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - for file in $files; do - case $file in - *.lo) ;; - *) - func_fatal_help "\`$destdir' must be an absolute directory name" - ;; - esac - done - ;; - esac - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. - libtool_install_magic="$magic" - - staticlibs= - future_libdirs= - current_libdirs= - for file in $files; do - - # Do each installation. - case $file in - *.$libext) - # Do the static libraries later. - staticlibs="$staticlibs $file" - ;; - - *.la) - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ - || func_fatal_help "\`$file' is not a valid libtool archive" - - library_names= - old_library= - relink_command= - func_source "$file" - - # Add the libdir to current_libdirs if it is the destination. - if test "X$destdir" = "X$libdir"; then - case "$current_libdirs " in - *" $libdir "*) ;; - *) current_libdirs="$current_libdirs $libdir" ;; - esac - else - # Note the libdir as a future libdir. - case "$future_libdirs " in - *" $libdir "*) ;; - *) future_libdirs="$future_libdirs $libdir" ;; - esac - fi - - func_dirname "$file" "/" "" - dir="$func_dirname_result" - dir="$dir$objdir" - - if test -n "$relink_command"; then - # Determine the prefix the user has applied to our future dir. - inst_prefix_dir=`$ECHO "X$destdir" | $Xsed -e "s%$libdir\$%%"` - - # Don't allow the user to place us outside of our expected - # location b/c this prevents finding dependent libraries that - # are installed to the same prefix. - # At present, this check doesn't affect windows .dll's that - # are installed into $libdir/../bin (currently, that works fine) - # but it's something to keep an eye on. - test "$inst_prefix_dir" = "$destdir" && \ - func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" - - if test -n "$inst_prefix_dir"; then - # Stick the inst_prefix_dir data into the link command. - relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` - else - relink_command=`$ECHO "X$relink_command" | $Xsed -e "s%@inst_prefix_dir@%%"` - fi - - func_warning "relinking \`$file'" - func_show_eval "$relink_command" \ - 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' - fi - - # See the names of the shared library. - set dummy $library_names; shift - if test -n "$1"; then - realname="$1" - shift - - srcname="$realname" - test -n "$relink_command" && srcname="$realname"T - - # Install the shared library and build the symlinks. - func_show_eval "$install_prog $dir/$srcname $destdir/$realname" \ - 'exit $?' - tstripme="$stripme" - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - case $realname in - *.dll.a) - tstripme="" - ;; - esac - ;; - esac - if test -n "$tstripme" && test -n "$striplib"; then - func_show_eval "$striplib $destdir/$realname" 'exit $?' - fi - - if test "$#" -gt 0; then - # Delete the old symlinks, and create new ones. 
- # Try `ln -sf' first, because the `ln' binary might depend on - # the symlink we replace! Solaris /bin/ln does not understand -f, - # so we also need to try rm && ln -s. - for linkname - do - test "$linkname" != "$realname" \ - && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })" - done - fi - - # Do each command in the postinstall commands. - lib="$destdir/$realname" - func_execute_cmds "$postinstall_cmds" 'exit $?' - fi - - # Install the pseudo-library for information purposes. - func_basename "$file" - name="$func_basename_result" - instname="$dir/$name"i - func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' - - # Maybe install the static library, too. - test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" - ;; - - *.lo) - # Install (i.e. copy) a libtool object. - - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then - destfile="$destdir/$destname" - else - func_basename "$file" - destfile="$func_basename_result" - destfile="$destdir/$destfile" - fi - - # Deduce the name of the destination old-style object file. - case $destfile in - *.lo) - func_lo2o "$destfile" - staticdest=$func_lo2o_result - ;; - *.$objext) - staticdest="$destfile" - destfile= - ;; - *) - func_fatal_help "cannot copy a libtool object to \`$destfile'" - ;; - esac - - # Install the libtool object if requested. - test -n "$destfile" && \ - func_show_eval "$install_prog $file $destfile" 'exit $?' - - # Install the old object if enabled. - if test "$build_old_libs" = yes; then - # Deduce the name of the old-style object file. - func_lo2o "$file" - staticobj=$func_lo2o_result - func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?' - fi - exit $EXIT_SUCCESS - ;; - - *) - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then - destfile="$destdir/$destname" - else - func_basename "$file" - destfile="$func_basename_result" - destfile="$destdir/$destfile" - fi - - # If the file is missing, and there is a .exe on the end, strip it - # because it is most likely a libtool script we actually want to - # install - stripped_ext="" - case $file in - *.exe) - if test ! -f "$file"; then - func_stripname '' '.exe' "$file" - file=$func_stripname_result - stripped_ext=".exe" - fi - ;; - esac - - # Do a test to see if this is really a libtool program. - case $host in - *cygwin* | *mingw*) - if func_ltwrapper_executable_p "$file"; then - func_ltwrapper_scriptname "$file" - wrapper=$func_ltwrapper_scriptname_result - else - func_stripname '' '.exe' "$file" - wrapper=$func_stripname_result - fi - ;; - *) - wrapper=$file - ;; - esac - if func_ltwrapper_script_p "$wrapper"; then - notinst_deplibs= - relink_command= - - func_source "$wrapper" - - # Check the variables that should have been set. - test -z "$generated_by_libtool_version" && \ - func_fatal_error "invalid libtool wrapper script \`$wrapper'" - - finalize=yes - for lib in $notinst_deplibs; do - # Check to see that each library is installed. - libdir= - if test -f "$lib"; then - func_source "$lib" - fi - libfile="$libdir/"`$ECHO "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test - if test -n "$libdir" && test ! 
-f "$libfile"; then - func_warning "\`$lib' has not been installed in \`$libdir'" - finalize=no - fi - done - - relink_command= - func_source "$wrapper" - - outputname= - if test "$fast_install" = no && test -n "$relink_command"; then - $opt_dry_run || { - if test "$finalize" = yes; then - tmpdir=`func_mktempdir` - func_basename "$file$stripped_ext" - file="$func_basename_result" - outputname="$tmpdir/$file" - # Replace the output file specification. - relink_command=`$ECHO "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'` - - $opt_silent || { - func_quote_for_expand "$relink_command" - eval "func_echo $func_quote_for_expand_result" - } - if eval "$relink_command"; then : - else - func_error "error: relink \`$file' with the above command before installing it" - $opt_dry_run || ${RM}r "$tmpdir" - continue - fi - file="$outputname" - else - func_warning "cannot relink \`$file'" - fi - } - else - # Install the binary that we compiled earlier. - file=`$ECHO "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"` - fi - fi - - # remove .exe since cygwin /usr/bin/install will append another - # one anyway - case $install_prog,$host in - */usr/bin/install*,*cygwin*) - case $file:$destfile in - *.exe:*.exe) - # this is ok - ;; - *.exe:*) - destfile=$destfile.exe - ;; - *:*.exe) - func_stripname '' '.exe' "$destfile" - destfile=$func_stripname_result - ;; - esac - ;; - esac - func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?' - $opt_dry_run || if test -n "$outputname"; then - ${RM}r "$tmpdir" - fi - ;; - esac - done - - for file in $staticlibs; do - func_basename "$file" - name="$func_basename_result" - - # Set up the ranlib parameters. - oldlib="$destdir/$name" - - func_show_eval "$install_prog \$file \$oldlib" 'exit $?' - - if test -n "$stripme" && test -n "$old_striplib"; then - func_show_eval "$old_striplib $oldlib" 'exit $?' - fi - - # Do each command in the postinstall commands. - func_execute_cmds "$old_postinstall_cmds" 'exit $?' - done - - test -n "$future_libdirs" && \ - func_warning "remember to run \`$progname --finish$future_libdirs'" - - if test -n "$current_libdirs"; then - # Maybe just do a dry run. - $opt_dry_run && current_libdirs=" -n$current_libdirs" - exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' - else - exit $EXIT_SUCCESS - fi -} - -test "$mode" = install && func_mode_install ${1+"$@"} - - -# func_generate_dlsyms outputname originator pic_p -# Extract symbols from dlprefiles and create ${outputname}S.o with -# a dlpreopen symbol table. -func_generate_dlsyms () -{ - $opt_debug - my_outputname="$1" - my_originator="$2" - my_pic_p="${3-no}" - my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` - my_dlsyms= - - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - if test -n "$NM" && test -n "$global_symbol_pipe"; then - my_dlsyms="${my_outputname}S.c" - else - func_error "not configured to extract global symbols from dlpreopened files" - fi - fi - - if test -n "$my_dlsyms"; then - case $my_dlsyms in - "") ;; - *.c) - # Discover the nlist of each of the dlfiles. - nlist="$output_objdir/${my_outputname}.nm" - - func_show_eval "$RM $nlist ${nlist}S ${nlist}T" - - # Parse the name list into a source file. - func_verbose "creating $output_objdir/$my_dlsyms" - - $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ -/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. 
*/ -/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ - -#ifdef __cplusplus -extern \"C\" { -#endif - -/* External symbol declarations for the compiler. */\ -" - - if test "$dlself" = yes; then - func_verbose "generating symbol list for \`$output'" - - $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" - - # Add our own program objects to the symbol list. - progfiles=`$ECHO "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - for progfile in $progfiles; do - func_verbose "extracting global C symbols from \`$progfile'" - $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" - done - - if test -n "$exclude_expsyms"; then - $opt_dry_run || { - eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' - eval '$MV "$nlist"T "$nlist"' - } - fi - - if test -n "$export_symbols_regex"; then - $opt_dry_run || { - eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' - eval '$MV "$nlist"T "$nlist"' - } - fi - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then - export_symbols="$output_objdir/$outputname.exp" - $opt_dry_run || { - $RM $export_symbols - eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' - case $host in - *cygwin* | *mingw* | *cegcc* ) - eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' - eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' - ;; - esac - } - else - $opt_dry_run || { - eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' - eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' - eval '$MV "$nlist"T "$nlist"' - case $host in - *cygwin | *mingw* | *cegcc* ) - eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' - eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' - ;; - esac - } - fi - fi - - for dlprefile in $dlprefiles; do - func_verbose "extracting global C symbols from \`$dlprefile'" - func_basename "$dlprefile" - name="$func_basename_result" - $opt_dry_run || { - eval '$ECHO ": $name " >> "$nlist"' - eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" - } - done - - $opt_dry_run || { - # Make sure we have at least an empty file. - test -f "$nlist" || : > "$nlist" - - if test -n "$exclude_expsyms"; then - $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T - $MV "$nlist"T "$nlist" - fi - - # Try sorting and uniquifying the output. - if $GREP -v "^: " < "$nlist" | - if sort -k 3 </dev/null >/dev/null 2>&1; then - sort -k 3 - else - sort +2 - fi | - uniq > "$nlist"S; then - : - else - $GREP -v "^: " < "$nlist" > "$nlist"S - fi - - if test -f "$nlist"S; then - eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"' - else - $ECHO '/* NONE */' >> "$output_objdir/$my_dlsyms" - fi - - $ECHO >> "$output_objdir/$my_dlsyms" "\ - -/* The mapping between symbol names and symbols. */ -typedef struct { - const char *name; - void *address; -} lt_dlsymlist; -" - case $host in - *cygwin* | *mingw* | *cegcc* ) - $ECHO >> "$output_objdir/$my_dlsyms" "\ -/* DATA imports from DLLs on WIN32 con't be const, because - runtime relocations are performed -- see ld's documentation - on pseudo-relocs. 
*/" - lt_dlsym_const= ;; - *osf5*) - echo >> "$output_objdir/$my_dlsyms" "\ -/* This system does not cope well with relocations in const data */" - lt_dlsym_const= ;; - *) - lt_dlsym_const=const ;; - esac - - $ECHO >> "$output_objdir/$my_dlsyms" "\ -extern $lt_dlsym_const lt_dlsymlist -lt_${my_prefix}_LTX_preloaded_symbols[]; -$lt_dlsym_const lt_dlsymlist -lt_${my_prefix}_LTX_preloaded_symbols[] = -{\ - { \"$my_originator\", (void *) 0 }," - - case $need_lib_prefix in - no) - eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms" - ;; - *) - eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms" - ;; - esac - $ECHO >> "$output_objdir/$my_dlsyms" "\ - {0, (void *) 0} -}; - -/* This works around a problem in FreeBSD linker */ -#ifdef FREEBSD_WORKAROUND -static const void *lt_preloaded_setup() { - return lt_${my_prefix}_LTX_preloaded_symbols; -} -#endif - -#ifdef __cplusplus -} -#endif\ -" - } # !$opt_dry_run - - pic_flag_for_symtable= - case "$compile_command " in - *" -static "*) ;; - *) - case $host in - # compiling the symbol table file with pic_flag works around - # a FreeBSD bug that causes programs to crash when -lm is - # linked before any other PIC object. But we must not use - # pic_flag when linking with -static. The problem exists in - # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. - *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) - pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;; - *-*-hpux*) - pic_flag_for_symtable=" $pic_flag" ;; - *) - if test "X$my_pic_p" != Xno; then - pic_flag_for_symtable=" $pic_flag" - fi - ;; - esac - ;; - esac - symtab_cflags= - for arg in $LTCFLAGS; do - case $arg in - -pie | -fpie | -fPIE) ;; - *) symtab_cflags="$symtab_cflags $arg" ;; - esac - done - - # Now compile the dynamic symbol file. - func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' - - # Clean up the generated files. - func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' - - # Transform the symbol file into the correct name. - symfileobj="$output_objdir/${my_outputname}S.$objext" - case $host in - *cygwin* | *mingw* | *cegcc* ) - if test -f "$output_objdir/$my_outputname.def"; then - compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` - finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` - else - compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` - finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` - fi - ;; - *) - compile_command=`$ECHO "X$compile_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` - finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$symfileobj%"` - ;; - esac - ;; - *) - func_fatal_error "unknown suffix for \`$my_dlsyms'" - ;; - esac - else - # We keep going just in case the user didn't refer to - # lt_preloaded_symbols. The linker will fail if global_symbol_pipe - # really was required. - - # Nullify the symbol file. 
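# A hedged sketch (not from ltmain.sh) of the raw ingredient behind the
# dlpreopen symbol table built above: listing globally defined symbols with
# nm.  $global_symbol_pipe is configure-generated, so plain awk stands in for
# it here, and foo.o is an illustrative object file:
nm foo.o | awk '$2 == "T" || $2 == "D" || $2 == "B" {print $2, $3}'
# each surviving "type name" pair is what becomes an lt_dlsymlist entry in the
# generated ${outputname}S.c shown above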
- compile_command=`$ECHO "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"` - finalize_command=`$ECHO "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"` - fi -} - -# func_win32_libid arg -# return the library type of file 'arg' -# -# Need a lot of goo to handle *both* DLLs and import libs -# Has to be a shell function in order to 'eat' the argument -# that is supplied when $file_magic_command is called. -func_win32_libid () -{ - $opt_debug - win32_libid_type="unknown" - win32_fileres=`file -L $1 2>/dev/null` - case $win32_fileres in - *ar\ archive\ import\ library*) # definitely import - win32_libid_type="x86 archive import" - ;; - *ar\ archive*) # could be an import, or static - if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | - $EGREP 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then - win32_nmres=`eval $NM -f posix -A $1 | - $SED -n -e ' - 1,100{ - / I /{ - s,.*,import, - p - q - } - }'` - case $win32_nmres in - import*) win32_libid_type="x86 archive import";; - *) win32_libid_type="x86 archive static";; - esac - fi - ;; - *DLL*) - win32_libid_type="x86 DLL" - ;; - *executable*) # but shell scripts are "executable" too... - case $win32_fileres in - *MS\ Windows\ PE\ Intel*) - win32_libid_type="x86 DLL" - ;; - esac - ;; - esac - $ECHO "$win32_libid_type" -} - - - -# func_extract_an_archive dir oldlib -func_extract_an_archive () -{ - $opt_debug - f_ex_an_ar_dir="$1"; shift - f_ex_an_ar_oldlib="$1" - func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" 'exit $?' - if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then - : - else - func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" - fi -} - - -# func_extract_archives gentop oldlib ... -func_extract_archives () -{ - $opt_debug - my_gentop="$1"; shift - my_oldlibs=${1+"$@"} - my_oldobjs="" - my_xlib="" - my_xabs="" - my_xdir="" - - for my_xlib in $my_oldlibs; do - # Extract the objects. - case $my_xlib in - [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; - *) my_xabs=`pwd`"/$my_xlib" ;; - esac - func_basename "$my_xlib" - my_xlib="$func_basename_result" - my_xlib_u=$my_xlib - while :; do - case " $extracted_archives " in - *" $my_xlib_u "*) - func_arith $extracted_serial + 1 - extracted_serial=$func_arith_result - my_xlib_u=lt$extracted_serial-$my_xlib ;; - *) break ;; - esac - done - extracted_archives="$extracted_archives $my_xlib_u" - my_xdir="$my_gentop/$my_xlib_u" - - func_mkdir_p "$my_xdir" - - case $host in - *-darwin*) - func_verbose "Extracting $my_xabs" - # Do not bother doing anything if just a dry run - $opt_dry_run || { - darwin_orig_dir=`pwd` - cd $my_xdir || exit $? 
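# A hedged stand-alone sketch (not from ltmain.sh) of the two ar invocations
# func_extract_an_archive above is built on; libfoo.a and /tmp/unpack are
# illustrative names only:
mkdir -p /tmp/unpack && (cd /tmp/unpack && ar x /path/to/libfoo.a)
# duplicate member names would overwrite each other during extraction, hence
# the same sanity check as above: `sort -uc' fails if any name repeats
ar t /path/to/libfoo.a | sort | sort -uc >/dev/null 2>&1 ||
  echo "object name conflicts in libfoo.a" >&2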
- darwin_archive=$my_xabs - darwin_curdir=`pwd` - darwin_base_archive=`basename "$darwin_archive"` - darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` - if test -n "$darwin_arches"; then - darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` - darwin_arch= - func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" - for darwin_arch in $darwin_arches ; do - func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" - $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" - cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" - func_extract_an_archive "`pwd`" "${darwin_base_archive}" - cd "$darwin_curdir" - $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" - done # $darwin_arches - ## Okay now we've a bunch of thin objects, gotta fatten them up :) - darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u` - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do - darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` - $LIPO -create -output "$darwin_file" $darwin_files - done # $darwin_filelist - $RM -rf unfat-$$ - cd "$darwin_orig_dir" - else - cd $darwin_orig_dir - func_extract_an_archive "$my_xdir" "$my_xabs" - fi # $darwin_arches - } # !$opt_dry_run - ;; - *) - func_extract_an_archive "$my_xdir" "$my_xabs" - ;; - esac - my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` - done - - func_extract_archives_result="$my_oldobjs" -} - - - -# func_emit_wrapper_part1 [arg=no] -# -# Emit the first part of a libtool wrapper script on stdout. -# For more information, see the description associated with -# func_emit_wrapper(), below. -func_emit_wrapper_part1 () -{ - func_emit_wrapper_part1_arg1=no - if test -n "$1" ; then - func_emit_wrapper_part1_arg1=$1 - fi - - $ECHO "\ -#! $SHELL - -# $output - temporary wrapper script for $objdir/$outputname -# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION -# -# The $output program cannot be directly executed until all the libtool -# libraries that it depends on are installed. -# -# This wrapper script should never be moved out of the build directory. -# If it is, it will not operate correctly. - -# Sed substitution that helps us do robust quoting. It backslashifies -# metacharacters that are still active within double-quoted strings. -Xsed='${SED} -e 1s/^X//' -sed_quote_subst='$sed_quote_subst' - -# Be Bourne compatible -if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac -fi -BIN_SH=xpg4; export BIN_SH # for Tru64 -DUALCASE=1; export DUALCASE # for MKS sh - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -relink_command=\"$relink_command\" - -# This environment variable determines our operation mode. -if test \"\$libtool_install_magic\" = \"$magic\"; then - # install mode needs the following variables: - generated_by_libtool_version='$macro_version' - notinst_deplibs='$notinst_deplibs' -else - # When we are sourced in execute mode, \$file and \$ECHO are already set. 
- if test \"\$libtool_execute_magic\" != \"$magic\"; then - ECHO=\"$qecho\" - file=\"\$0\" - # Make sure echo works. - if test \"X\$1\" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift - elif test \"X\`{ \$ECHO '\t'; } 2>/dev/null\`\" = 'X\t'; then - # Yippee, \$ECHO works! - : - else - # Restart under the correct shell, and then maybe \$ECHO will work. - exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"} - fi - fi\ -" - $ECHO "\ - - # Find the directory that this script lives in. - thisdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\` - test \"x\$thisdir\" = \"x\$file\" && thisdir=. - - # Follow symbolic links until we get to the real thisdir. - file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\` - while test -n \"\$file\"; do - destdir=\`\$ECHO \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\` - - # If there was a directory component, then change thisdir. - if test \"x\$destdir\" != \"x\$file\"; then - case \"\$destdir\" in - [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; - *) thisdir=\"\$thisdir/\$destdir\" ;; - esac - fi - - file=\`\$ECHO \"X\$file\" | \$Xsed -e 's%^.*/%%'\` - file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\` - done -" -} -# end: func_emit_wrapper_part1 - -# func_emit_wrapper_part2 [arg=no] -# -# Emit the second part of a libtool wrapper script on stdout. -# For more information, see the description associated with -# func_emit_wrapper(), below. -func_emit_wrapper_part2 () -{ - func_emit_wrapper_part2_arg1=no - if test -n "$1" ; then - func_emit_wrapper_part2_arg1=$1 - fi - - $ECHO "\ - - # Usually 'no', except on cygwin/mingw when embedded into - # the cwrapper. - WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_part2_arg1 - if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then - # special case for '.' - if test \"\$thisdir\" = \".\"; then - thisdir=\`pwd\` - fi - # remove .libs from thisdir - case \"\$thisdir\" in - *[\\\\/]$objdir ) thisdir=\`\$ECHO \"X\$thisdir\" | \$Xsed -e 's%[\\\\/][^\\\\/]*$%%'\` ;; - $objdir ) thisdir=. ;; - esac - fi - - # Try to get the absolute directory name. - absdir=\`cd \"\$thisdir\" && pwd\` - test -n \"\$absdir\" && thisdir=\"\$absdir\" -" - - if test "$fast_install" = yes; then - $ECHO "\ - program=lt-'$outputname'$exeext - progdir=\"\$thisdir/$objdir\" - - if test ! -f \"\$progdir/\$program\" || - { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ - test \"X\$file\" != \"X\$progdir/\$program\"; }; then - - file=\"\$\$-\$program\" - - if test ! -d \"\$progdir\"; then - $MKDIR \"\$progdir\" - else - $RM \"\$progdir/\$file\" - fi" - - $ECHO "\ - - # relink executable if necessary - if test -n \"\$relink_command\"; then - if relink_command_output=\`eval \$relink_command 2>&1\`; then : - else - $ECHO \"\$relink_command_output\" >&2 - $RM \"\$progdir/\$file\" - exit 1 - fi - fi - - $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || - { $RM \"\$progdir/\$program\"; - $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; } - $RM \"\$progdir/\$file\" - fi" - else - $ECHO "\ - program='$outputname' - progdir=\"\$thisdir/$objdir\" -" - fi - - $ECHO "\ - - if test -f \"\$progdir/\$program\"; then" - - # Export our shlibpath_var if we have one. 
- if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - $ECHO "\ - # Add our own library path to $shlibpath_var - $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" - - # Some systems cannot cope with colon-terminated $shlibpath_var - # The second colon is a workaround for a bug in BeOS R4 sed - $shlibpath_var=\`\$ECHO \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\` - - export $shlibpath_var -" - fi - - # fixup the dll searchpath if we need to. - if test -n "$dllsearchpath"; then - $ECHO "\ - # Add the dll search path components to the executable PATH - PATH=$dllsearchpath:\$PATH -" - fi - - $ECHO "\ - if test \"\$libtool_execute_magic\" != \"$magic\"; then - # Run the actual program with our arguments. -" - case $host in - # Backslashes separate directories on plain windows - *-*-mingw | *-*-os2* | *-cegcc*) - $ECHO "\ - exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} -" - ;; - - *) - $ECHO "\ - exec \"\$progdir/\$program\" \${1+\"\$@\"} -" - ;; - esac - $ECHO "\ - \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 - exit 1 - fi - else - # The program doesn't exist. - \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 - \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 - $ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 - exit 1 - fi -fi\ -" -} -# end: func_emit_wrapper_part2 - - -# func_emit_wrapper [arg=no] -# -# Emit a libtool wrapper script on stdout. -# Don't directly open a file because we may want to -# incorporate the script contents within a cygwin/mingw -# wrapper executable. Must ONLY be called from within -# func_mode_link because it depends on a number of variables -# set therein. -# -# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR -# variable will take. If 'yes', then the emitted script -# will assume that the directory in which it is stored is -# the $objdir directory. This is a cygwin/mingw-specific -# behavior. -func_emit_wrapper () -{ - func_emit_wrapper_arg1=no - if test -n "$1" ; then - func_emit_wrapper_arg1=$1 - fi - - # split this up so that func_emit_cwrapperexe_src - # can call each part independently. - func_emit_wrapper_part1 "${func_emit_wrapper_arg1}" - func_emit_wrapper_part2 "${func_emit_wrapper_arg1}" -} - - -# func_to_host_path arg -# -# Convert paths to host format when used with build tools. -# Intended for use with "native" mingw (where libtool itself -# is running under the msys shell), or in the following cross- -# build environments: -# $build $host -# mingw (msys) mingw [e.g. native] -# cygwin mingw -# *nix + wine mingw -# where wine is equipped with the `winepath' executable. -# In the native mingw case, the (msys) shell automatically -# converts paths for any non-msys applications it launches, -# but that facility isn't available from inside the cwrapper. -# Similar accommodations are necessary for $host mingw and -# $build cygwin. Calling this function does no harm for other -# $host/$build combinations not listed above. -# -# ARG is the path (on $build) that should be converted to -# the proper representation for $host. The result is stored -# in $func_to_host_path_result. 
-func_to_host_path () -{ - func_to_host_path_result="$1" - if test -n "$1" ; then - case $host in - *mingw* ) - lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - case $build in - *mingw* ) # actually, msys - # awkward: cmd appends spaces to result - lt_sed_strip_trailing_spaces="s/[ ]*\$//" - func_to_host_path_tmp1=`( cmd //c echo "$1" |\ - $SED -e "$lt_sed_strip_trailing_spaces" ) 2>/dev/null || echo ""` - func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\ - $SED -e "$lt_sed_naive_backslashify"` - ;; - *cygwin* ) - func_to_host_path_tmp1=`cygpath -w "$1"` - func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\ - $SED -e "$lt_sed_naive_backslashify"` - ;; - * ) - # Unfortunately, winepath does not exit with a non-zero - # error code, so we are forced to check the contents of - # stdout. On the other hand, if the command is not - # found, the shell will set an exit code of 127 and print - # *an error message* to stdout. So we must check for both - # error code of zero AND non-empty stdout, which explains - # the odd construction: - func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null` - if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then - func_to_host_path_result=`echo "$func_to_host_path_tmp1" |\ - $SED -e "$lt_sed_naive_backslashify"` - else - # Allow warning below. - func_to_host_path_result="" - fi - ;; - esac - if test -z "$func_to_host_path_result" ; then - func_error "Could not determine host path corresponding to" - func_error " '$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback: - func_to_host_path_result="$1" - fi - ;; - esac - fi -} -# end: func_to_host_path - -# func_to_host_pathlist arg -# -# Convert pathlists to host format when used with build tools. -# See func_to_host_path(), above. This function supports the -# following $build/$host combinations (but does no harm for -# combinations not listed here): -# $build $host -# mingw (msys) mingw [e.g. native] -# cygwin mingw -# *nix + wine mingw -# -# Path separators are also converted from $build format to -# $host format. If ARG begins or ends with a path separator -# character, it is preserved (but converted to $host format) -# on output. -# -# ARG is a pathlist (on $build) that should be converted to -# the proper representation on $host. The result is stored -# in $func_to_host_pathlist_result. -func_to_host_pathlist () -{ - func_to_host_pathlist_result="$1" - if test -n "$1" ; then - case $host in - *mingw* ) - lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - # Remove leading and trailing path separator characters from - # ARG. msys behavior is inconsistent here, cygpath turns them - # into '.;' and ';.', and winepath ignores them completely. - func_to_host_pathlist_tmp2="$1" - # Once set for this call, this variable should not be - # reassigned. It is used in tha fallback case. - func_to_host_pathlist_tmp1=`echo "$func_to_host_pathlist_tmp2" |\ - $SED -e 's|^:*||' -e 's|:*$||'` - case $build in - *mingw* ) # Actually, msys. - # Awkward: cmd appends spaces to result. 
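# A hedged sketch (not from ltmain.sh) of what the path conversions above boil
# down to.  Under Cygwin the heavy lifting is a single cygpath call (the path
# and its output are illustrative):
cygpath -w /usr/local/lib        # prints something like C:\cygwin\usr\local\lib
# and the "naive backslashify" used for the msys and wine branches is plain sed:
echo /c/build/libs | sed -e 's|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'   # prints \\c\\build\\libs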
- lt_sed_strip_trailing_spaces="s/[ ]*\$//" - func_to_host_pathlist_tmp2=`( cmd //c echo "$func_to_host_pathlist_tmp1" |\ - $SED -e "$lt_sed_strip_trailing_spaces" ) 2>/dev/null || echo ""` - func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp2" |\ - $SED -e "$lt_sed_naive_backslashify"` - ;; - *cygwin* ) - func_to_host_pathlist_tmp2=`cygpath -w -p "$func_to_host_pathlist_tmp1"` - func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp2" |\ - $SED -e "$lt_sed_naive_backslashify"` - ;; - * ) - # unfortunately, winepath doesn't convert pathlists - func_to_host_pathlist_result="" - func_to_host_pathlist_oldIFS=$IFS - IFS=: - for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do - IFS=$func_to_host_pathlist_oldIFS - if test -n "$func_to_host_pathlist_f" ; then - func_to_host_path "$func_to_host_pathlist_f" - if test -n "$func_to_host_path_result" ; then - if test -z "$func_to_host_pathlist_result" ; then - func_to_host_pathlist_result="$func_to_host_path_result" - else - func_to_host_pathlist_result="$func_to_host_pathlist_result;$func_to_host_path_result" - fi - fi - fi - IFS=: - done - IFS=$func_to_host_pathlist_oldIFS - ;; - esac - if test -z "$func_to_host_pathlist_result" ; then - func_error "Could not determine the host path(s) corresponding to" - func_error " '$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback. This may break if $1 contains DOS-style drive - # specifications. The fix is not to complicate the expression - # below, but for the user to provide a working wine installation - # with winepath so that path translation in the cross-to-mingw - # case works properly. - lt_replace_pathsep_nix_to_dos="s|:|;|g" - func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\ - $SED -e "$lt_replace_pathsep_nix_to_dos"` - fi - # Now, add the leading and trailing path separators back - case "$1" in - :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result" - ;; - esac - case "$1" in - *: ) func_to_host_pathlist_result="$func_to_host_pathlist_result;" - ;; - esac - ;; - esac - fi -} -# end: func_to_host_pathlist - -# func_emit_cwrapperexe_src -# emit the source code for a wrapper executable on stdout -# Must ONLY be called from within func_mode_link because -# it depends on a number of variable set therein. -func_emit_cwrapperexe_src () -{ - cat <<EOF - -/* $cwrappersource - temporary wrapper executable for $objdir/$outputname - Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - - The $output program cannot be directly executed until all the libtool - libraries that it depends on are installed. - - This wrapper executable should never be moved out of the build directory. - If it is, it will not operate correctly. - - Currently, it simply execs the wrapper *script* "$SHELL $output", - but could eventually absorb all of the scripts functionality and - exec $objdir/$outputname directly. 
-*/ -EOF - cat <<"EOF" -#include <stdio.h> -#include <stdlib.h> -#ifdef _MSC_VER -# include <direct.h> -# include <process.h> -# include <io.h> -# define setmode _setmode -#else -# include <unistd.h> -# include <stdint.h> -# ifdef __CYGWIN__ -# include <io.h> -# define HAVE_SETENV -# ifdef __STRICT_ANSI__ -char *realpath (const char *, char *); -int putenv (char *); -int setenv (const char *, const char *, int); -# endif -# endif -#endif -#include <malloc.h> -#include <stdarg.h> -#include <assert.h> -#include <string.h> -#include <ctype.h> -#include <errno.h> -#include <fcntl.h> -#include <sys/stat.h> - -#if defined(PATH_MAX) -# define LT_PATHMAX PATH_MAX -#elif defined(MAXPATHLEN) -# define LT_PATHMAX MAXPATHLEN -#else -# define LT_PATHMAX 1024 -#endif - -#ifndef S_IXOTH -# define S_IXOTH 0 -#endif -#ifndef S_IXGRP -# define S_IXGRP 0 -#endif - -#ifdef _MSC_VER -# define S_IXUSR _S_IEXEC -# define stat _stat -# ifndef _INTPTR_T_DEFINED -# define intptr_t int -# endif -#endif - -#ifndef DIR_SEPARATOR -# define DIR_SEPARATOR '/' -# define PATH_SEPARATOR ':' -#endif - -#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ - defined (__OS2__) -# define HAVE_DOS_BASED_FILE_SYSTEM -# define FOPEN_WB "wb" -# ifndef DIR_SEPARATOR_2 -# define DIR_SEPARATOR_2 '\\' -# endif -# ifndef PATH_SEPARATOR_2 -# define PATH_SEPARATOR_2 ';' -# endif -#endif - -#ifndef DIR_SEPARATOR_2 -# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) -#else /* DIR_SEPARATOR_2 */ -# define IS_DIR_SEPARATOR(ch) \ - (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) -#endif /* DIR_SEPARATOR_2 */ - -#ifndef PATH_SEPARATOR_2 -# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) -#else /* PATH_SEPARATOR_2 */ -# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) -#endif /* PATH_SEPARATOR_2 */ - -#ifdef __CYGWIN__ -# define FOPEN_WB "wb" -#endif - -#ifndef FOPEN_WB -# define FOPEN_WB "w" -#endif -#ifndef _O_BINARY -# define _O_BINARY 0 -#endif - -#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) -#define XFREE(stale) do { \ - if (stale) { free ((void *) stale); stale = 0; } \ -} while (0) - -#undef LTWRAPPER_DEBUGPRINTF -#if defined DEBUGWRAPPER -# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args -static void -ltwrapper_debugprintf (const char *fmt, ...) 
-{ - va_list args; - va_start (args, fmt); - (void) vfprintf (stderr, fmt, args); - va_end (args); -} -#else -# define LTWRAPPER_DEBUGPRINTF(args) -#endif - -const char *program_name = NULL; - -void *xmalloc (size_t num); -char *xstrdup (const char *string); -const char *base_name (const char *name); -char *find_executable (const char *wrapper); -char *chase_symlinks (const char *pathspec); -int make_executable (const char *path); -int check_executable (const char *path); -char *strendzap (char *str, const char *pat); -void lt_fatal (const char *message, ...); -void lt_setenv (const char *name, const char *value); -char *lt_extend_str (const char *orig_value, const char *add, int to_end); -void lt_opt_process_env_set (const char *arg); -void lt_opt_process_env_prepend (const char *arg); -void lt_opt_process_env_append (const char *arg); -int lt_split_name_value (const char *arg, char** name, char** value); -void lt_update_exe_path (const char *name, const char *value); -void lt_update_lib_path (const char *name, const char *value); - -static const char *script_text_part1 = -EOF - - func_emit_wrapper_part1 yes | - $SED -e 's/\([\\"]\)/\\\1/g' \ - -e 's/^/ "/' -e 's/$/\\n"/' - echo ";" - cat <<EOF - -static const char *script_text_part2 = -EOF - func_emit_wrapper_part2 yes | - $SED -e 's/\([\\"]\)/\\\1/g' \ - -e 's/^/ "/' -e 's/$/\\n"/' - echo ";" - - cat <<EOF -const char * MAGIC_EXE = "$magic_exe"; -const char * LIB_PATH_VARNAME = "$shlibpath_var"; -EOF - - if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - func_to_host_pathlist "$temp_rpath" - cat <<EOF -const char * LIB_PATH_VALUE = "$func_to_host_pathlist_result"; -EOF - else - cat <<"EOF" -const char * LIB_PATH_VALUE = ""; -EOF - fi - - if test -n "$dllsearchpath"; then - func_to_host_pathlist "$dllsearchpath:" - cat <<EOF -const char * EXE_PATH_VARNAME = "PATH"; -const char * EXE_PATH_VALUE = "$func_to_host_pathlist_result"; -EOF - else - cat <<"EOF" -const char * EXE_PATH_VARNAME = ""; -const char * EXE_PATH_VALUE = ""; -EOF - fi - - if test "$fast_install" = yes; then - cat <<EOF -const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */ -EOF - else - cat <<EOF -const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */ -EOF - fi - - - cat <<"EOF" - -#define LTWRAPPER_OPTION_PREFIX "--lt-" -#define LTWRAPPER_OPTION_PREFIX_LENGTH 5 - -static const size_t opt_prefix_len = LTWRAPPER_OPTION_PREFIX_LENGTH; -static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX; - -static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script"; - -static const size_t env_set_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 7; -static const char *env_set_opt = LTWRAPPER_OPTION_PREFIX "env-set"; - /* argument is putenv-style "foo=bar", value of foo is set to bar */ - -static const size_t env_prepend_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 11; -static const char *env_prepend_opt = LTWRAPPER_OPTION_PREFIX "env-prepend"; - /* argument is putenv-style "foo=bar", new value of foo is bar${foo} */ - -static const size_t env_append_opt_len = LTWRAPPER_OPTION_PREFIX_LENGTH + 10; -static const char *env_append_opt = LTWRAPPER_OPTION_PREFIX "env-append"; - /* argument is putenv-style "foo=bar", new value of foo is ${foo}bar */ - -int -main (int argc, char *argv[]) -{ - char **newargz; - int newargc; - char *tmp_pathspec; - char *actual_cwrapper_path; - char *actual_cwrapper_name; - char *target_name; - char *lt_argv_zero; - intptr_t rval = 127; - - int i; 
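# A hedged usage sketch (not part of the generated C source) for the "--lt-*"
# option namespace declared above; ./myprog stands in for whatever cwrapper
# executable a particular build actually produces:
./myprog --lt-dump-script                      # print the embedded wrapper script
./myprog --lt-env-set FOO=bar real-arg         # set FOO=bar, then run with real-arg
./myprog --lt-env-prepend PATH=/opt/bin: arg   # new PATH becomes /opt/bin:${PATH}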
- - program_name = (char *) xstrdup (base_name (argv[0])); - LTWRAPPER_DEBUGPRINTF (("(main) argv[0] : %s\n", argv[0])); - LTWRAPPER_DEBUGPRINTF (("(main) program_name : %s\n", program_name)); - - /* very simple arg parsing; don't want to rely on getopt */ - for (i = 1; i < argc; i++) - { - if (strcmp (argv[i], dumpscript_opt) == 0) - { -EOF - case "$host" in - *mingw* | *cygwin* ) - # make stdout use "unix" line endings - echo " setmode(1,_O_BINARY);" - ;; - esac - - cat <<"EOF" - printf ("%s", script_text_part1); - printf ("%s", script_text_part2); - return 0; - } - } - - newargz = XMALLOC (char *, argc + 1); - tmp_pathspec = find_executable (argv[0]); - if (tmp_pathspec == NULL) - lt_fatal ("Couldn't find %s", argv[0]); - LTWRAPPER_DEBUGPRINTF (("(main) found exe (before symlink chase) at : %s\n", - tmp_pathspec)); - - actual_cwrapper_path = chase_symlinks (tmp_pathspec); - LTWRAPPER_DEBUGPRINTF (("(main) found exe (after symlink chase) at : %s\n", - actual_cwrapper_path)); - XFREE (tmp_pathspec); - - actual_cwrapper_name = xstrdup( base_name (actual_cwrapper_path)); - strendzap (actual_cwrapper_path, actual_cwrapper_name); - - /* wrapper name transforms */ - strendzap (actual_cwrapper_name, ".exe"); - tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1); - XFREE (actual_cwrapper_name); - actual_cwrapper_name = tmp_pathspec; - tmp_pathspec = 0; - - /* target_name transforms -- use actual target program name; might have lt- prefix */ - target_name = xstrdup (base_name (TARGET_PROGRAM_NAME)); - strendzap (target_name, ".exe"); - tmp_pathspec = lt_extend_str (target_name, ".exe", 1); - XFREE (target_name); - target_name = tmp_pathspec; - tmp_pathspec = 0; - - LTWRAPPER_DEBUGPRINTF (("(main) libtool target name: %s\n", - target_name)); -EOF - - cat <<EOF - newargz[0] = - XMALLOC (char, (strlen (actual_cwrapper_path) + - strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1)); - strcpy (newargz[0], actual_cwrapper_path); - strcat (newargz[0], "$objdir"); - strcat (newargz[0], "/"); -EOF - - cat <<"EOF" - /* stop here, and copy so we don't have to do this twice */ - tmp_pathspec = xstrdup (newargz[0]); - - /* do NOT want the lt- prefix here, so use actual_cwrapper_name */ - strcat (newargz[0], actual_cwrapper_name); - - /* DO want the lt- prefix here if it exists, so use target_name */ - lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1); - XFREE (tmp_pathspec); - tmp_pathspec = NULL; -EOF - - case $host_os in - mingw*) - cat <<"EOF" - { - char* p; - while ((p = strchr (newargz[0], '\\')) != NULL) - { - *p = '/'; - } - while ((p = strchr (lt_argv_zero, '\\')) != NULL) - { - *p = '/'; - } - } -EOF - ;; - esac - - cat <<"EOF" - XFREE (target_name); - XFREE (actual_cwrapper_path); - XFREE (actual_cwrapper_name); - - lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */ - lt_setenv ("DUALCASE", "1"); /* for MSK sh */ - lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE); - lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE); - - newargc=0; - for (i = 1; i < argc; i++) - { - if (strncmp (argv[i], env_set_opt, env_set_opt_len) == 0) - { - if (argv[i][env_set_opt_len] == '=') - { - const char *p = argv[i] + env_set_opt_len + 1; - lt_opt_process_env_set (p); - } - else if (argv[i][env_set_opt_len] == '\0' && i + 1 < argc) - { - lt_opt_process_env_set (argv[++i]); /* don't copy */ - } - else - lt_fatal ("%s missing required argument", env_set_opt); - continue; - } - if (strncmp (argv[i], env_prepend_opt, env_prepend_opt_len) == 0) - { - if (argv[i][env_prepend_opt_len] == '=') - { 
- const char *p = argv[i] + env_prepend_opt_len + 1; - lt_opt_process_env_prepend (p); - } - else if (argv[i][env_prepend_opt_len] == '\0' && i + 1 < argc) - { - lt_opt_process_env_prepend (argv[++i]); /* don't copy */ - } - else - lt_fatal ("%s missing required argument", env_prepend_opt); - continue; - } - if (strncmp (argv[i], env_append_opt, env_append_opt_len) == 0) - { - if (argv[i][env_append_opt_len] == '=') - { - const char *p = argv[i] + env_append_opt_len + 1; - lt_opt_process_env_append (p); - } - else if (argv[i][env_append_opt_len] == '\0' && i + 1 < argc) - { - lt_opt_process_env_append (argv[++i]); /* don't copy */ - } - else - lt_fatal ("%s missing required argument", env_append_opt); - continue; - } - if (strncmp (argv[i], ltwrapper_option_prefix, opt_prefix_len) == 0) - { - /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX - namespace, but it is not one of the ones we know about and - have already dealt with, above (inluding dump-script), then - report an error. Otherwise, targets might begin to believe - they are allowed to use options in the LTWRAPPER_OPTION_PREFIX - namespace. The first time any user complains about this, we'll - need to make LTWRAPPER_OPTION_PREFIX a configure-time option - or a configure.ac-settable value. - */ - lt_fatal ("Unrecognized option in %s namespace: '%s'", - ltwrapper_option_prefix, argv[i]); - } - /* otherwise ... */ - newargz[++newargc] = xstrdup (argv[i]); - } - newargz[++newargc] = NULL; - - LTWRAPPER_DEBUGPRINTF (("(main) lt_argv_zero : %s\n", (lt_argv_zero ? lt_argv_zero : "<NULL>"))); - for (i = 0; i < newargc; i++) - { - LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : "<NULL>"))); - } - -EOF - - case $host_os in - mingw*) - cat <<"EOF" - /* execv doesn't actually work on mingw as expected on unix */ - rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); - if (rval == -1) - { - /* failed to start process */ - LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno)); - return 127; - } - return rval; -EOF - ;; - *) - cat <<"EOF" - execv (lt_argv_zero, newargz); - return rval; /* =127, but avoids unused variable warning */ -EOF - ;; - esac - - cat <<"EOF" -} - -void * -xmalloc (size_t num) -{ - void *p = (void *) malloc (num); - if (!p) - lt_fatal ("Memory exhausted"); - - return p; -} - -char * -xstrdup (const char *string) -{ - return string ? strcpy ((char *) xmalloc (strlen (string) + 1), - string) : NULL; -} - -const char * -base_name (const char *name) -{ - const char *base; - -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - /* Skip over the disk name in MSDOS pathnames. */ - if (isalpha ((unsigned char) name[0]) && name[1] == ':') - name += 2; -#endif - - for (base = name; *name; name++) - if (IS_DIR_SEPARATOR (*name)) - base = name + 1; - return base; -} - -int -check_executable (const char *path) -{ - struct stat st; - - LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n", - path ? (*path ? path : "EMPTY!") : "NULL!")); - if ((!path) || (!*path)) - return 0; - - if ((stat (path, &st) >= 0) - && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))) - return 1; - else - return 0; -} - -int -make_executable (const char *path) -{ - int rval = 0; - struct stat st; - - LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n", - path ? (*path ? 
path : "EMPTY!") : "NULL!")); - if ((!path) || (!*path)) - return 0; - - if (stat (path, &st) >= 0) - { - rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR); - } - return rval; -} - -/* Searches for the full path of the wrapper. Returns - newly allocated full path name if found, NULL otherwise - Does not chase symlinks, even on platforms that support them. -*/ -char * -find_executable (const char *wrapper) -{ - int has_slash = 0; - const char *p; - const char *p_next; - /* static buffer for getcwd */ - char tmp[LT_PATHMAX + 1]; - int tmp_len; - char *concat_name; - - LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n", - wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!")); - - if ((wrapper == NULL) || (*wrapper == '\0')) - return NULL; - - /* Absolute path? */ -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') - { - concat_name = xstrdup (wrapper); - if (check_executable (concat_name)) - return concat_name; - XFREE (concat_name); - } - else - { -#endif - if (IS_DIR_SEPARATOR (wrapper[0])) - { - concat_name = xstrdup (wrapper); - if (check_executable (concat_name)) - return concat_name; - XFREE (concat_name); - } -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - } -#endif - - for (p = wrapper; *p; p++) - if (*p == '/') - { - has_slash = 1; - break; - } - if (!has_slash) - { - /* no slashes; search PATH */ - const char *path = getenv ("PATH"); - if (path != NULL) - { - for (p = path; *p; p = p_next) - { - const char *q; - size_t p_len; - for (q = p; *q; q++) - if (IS_PATH_SEPARATOR (*q)) - break; - p_len = q - p; - p_next = (*q == '\0' ? q : q + 1); - if (p_len == 0) - { - /* empty path: current directory */ - if (getcwd (tmp, LT_PATHMAX) == NULL) - lt_fatal ("getcwd failed"); - tmp_len = strlen (tmp); - concat_name = - XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); - memcpy (concat_name, tmp, tmp_len); - concat_name[tmp_len] = '/'; - strcpy (concat_name + tmp_len + 1, wrapper); - } - else - { - concat_name = - XMALLOC (char, p_len + 1 + strlen (wrapper) + 1); - memcpy (concat_name, p, p_len); - concat_name[p_len] = '/'; - strcpy (concat_name + p_len + 1, wrapper); - } - if (check_executable (concat_name)) - return concat_name; - XFREE (concat_name); - } - } - /* not found in PATH; assume curdir */ - } - /* Relative path | not found in path: prepend cwd */ - if (getcwd (tmp, LT_PATHMAX) == NULL) - lt_fatal ("getcwd failed"); - tmp_len = strlen (tmp); - concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); - memcpy (concat_name, tmp, tmp_len); - concat_name[tmp_len] = '/'; - strcpy (concat_name + tmp_len + 1, wrapper); - - if (check_executable (concat_name)) - return concat_name; - XFREE (concat_name); - return NULL; -} - -char * -chase_symlinks (const char *pathspec) -{ -#ifndef S_ISLNK - return xstrdup (pathspec); -#else - char buf[LT_PATHMAX]; - struct stat s; - char *tmp_pathspec = xstrdup (pathspec); - char *p; - int has_symlinks = 0; - while (strlen (tmp_pathspec) && !has_symlinks) - { - LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n", - tmp_pathspec)); - if (lstat (tmp_pathspec, &s) == 0) - { - if (S_ISLNK (s.st_mode) != 0) - { - has_symlinks = 1; - break; - } - - /* search backwards for last DIR_SEPARATOR */ - p = tmp_pathspec + strlen (tmp_pathspec) - 1; - while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) - p--; - if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) - { - /* no more DIR_SEPARATORS left */ - break; - } - *p = '\0'; - } - else - { - char *errstr = strerror (errno); 
- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr); - } - } - XFREE (tmp_pathspec); - - if (!has_symlinks) - { - return xstrdup (pathspec); - } - - tmp_pathspec = realpath (pathspec, buf); - if (tmp_pathspec == 0) - { - lt_fatal ("Could not follow symlinks for %s", pathspec); - } - return xstrdup (tmp_pathspec); -#endif -} - -char * -strendzap (char *str, const char *pat) -{ - size_t len, patlen; - - assert (str != NULL); - assert (pat != NULL); - - len = strlen (str); - patlen = strlen (pat); - - if (patlen <= len) - { - str += len - patlen; - if (strcmp (str, pat) == 0) - *str = '\0'; - } - return str; -} - -static void -lt_error_core (int exit_status, const char *mode, - const char *message, va_list ap) -{ - fprintf (stderr, "%s: %s: ", program_name, mode); - vfprintf (stderr, message, ap); - fprintf (stderr, ".\n"); - - if (exit_status >= 0) - exit (exit_status); -} - -void -lt_fatal (const char *message, ...) -{ - va_list ap; - va_start (ap, message); - lt_error_core (EXIT_FAILURE, "FATAL", message, ap); - va_end (ap); -} - -void -lt_setenv (const char *name, const char *value) -{ - LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n", - (name ? name : "<NULL>"), - (value ? value : "<NULL>"))); - { -#ifdef HAVE_SETENV - /* always make a copy, for consistency with !HAVE_SETENV */ - char *str = xstrdup (value); - setenv (name, str, 1); -#else - int len = strlen (name) + 1 + strlen (value) + 1; - char *str = XMALLOC (char, len); - sprintf (str, "%s=%s", name, value); - if (putenv (str) != EXIT_SUCCESS) - { - XFREE (str); - } -#endif - } -} - -char * -lt_extend_str (const char *orig_value, const char *add, int to_end) -{ - char *new_value; - if (orig_value && *orig_value) - { - int orig_value_len = strlen (orig_value); - int add_len = strlen (add); - new_value = XMALLOC (char, add_len + orig_value_len + 1); - if (to_end) - { - strcpy (new_value, orig_value); - strcpy (new_value + orig_value_len, add); - } - else - { - strcpy (new_value, add); - strcpy (new_value + add_len, orig_value); - } - } - else - { - new_value = xstrdup (add); - } - return new_value; -} - -int -lt_split_name_value (const char *arg, char** name, char** value) -{ - const char *p; - int len; - if (!arg || !*arg) - return 1; - - p = strchr (arg, (int)'='); - - if (!p) - return 1; - - *value = xstrdup (++p); - - len = strlen (arg) - strlen (*value); - *name = XMALLOC (char, len); - strncpy (*name, arg, len-1); - (*name)[len - 1] = '\0'; - - return 0; -} - -void -lt_opt_process_env_set (const char *arg) -{ - char *name = NULL; - char *value = NULL; - - if (lt_split_name_value (arg, &name, &value) != 0) - { - XFREE (name); - XFREE (value); - lt_fatal ("bad argument for %s: '%s'", env_set_opt, arg); - } - - lt_setenv (name, value); - XFREE (name); - XFREE (value); -} - -void -lt_opt_process_env_prepend (const char *arg) -{ - char *name = NULL; - char *value = NULL; - char *new_value = NULL; - - if (lt_split_name_value (arg, &name, &value) != 0) - { - XFREE (name); - XFREE (value); - lt_fatal ("bad argument for %s: '%s'", env_prepend_opt, arg); - } - - new_value = lt_extend_str (getenv (name), value, 0); - lt_setenv (name, new_value); - XFREE (new_value); - XFREE (name); - XFREE (value); -} - -void -lt_opt_process_env_append (const char *arg) -{ - char *name = NULL; - char *value = NULL; - char *new_value = NULL; - - if (lt_split_name_value (arg, &name, &value) != 0) - { - XFREE (name); - XFREE (value); - lt_fatal ("bad argument for %s: '%s'", env_append_opt, arg); - } - - new_value = lt_extend_str 
(getenv (name), value, 1); - lt_setenv (name, new_value); - XFREE (new_value); - XFREE (name); - XFREE (value); -} - -void -lt_update_exe_path (const char *name, const char *value) -{ - LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n", - (name ? name : "<NULL>"), - (value ? value : "<NULL>"))); - - if (name && *name && value && *value) - { - char *new_value = lt_extend_str (getenv (name), value, 0); - /* some systems can't cope with a ':'-terminated path #' */ - int len = strlen (new_value); - while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1])) - { - new_value[len-1] = '\0'; - } - lt_setenv (name, new_value); - XFREE (new_value); - } -} - -void -lt_update_lib_path (const char *name, const char *value) -{ - LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n", - (name ? name : "<NULL>"), - (value ? value : "<NULL>"))); - - if (name && *name && value && *value) - { - char *new_value = lt_extend_str (getenv (name), value, 0); - lt_setenv (name, new_value); - XFREE (new_value); - } -} - - -EOF -} -# end: func_emit_cwrapperexe_src - -# func_mode_link arg... -func_mode_link () -{ - $opt_debug - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) - # It is impossible to link a dll without this setting, and - # we shouldn't force the makefile maintainer to figure out - # which system we are compiling for in order to pass an extra - # flag for every libtool invocation. - # allow_undefined=no - - # FIXME: Unfortunately, there are problems with the above when trying - # to make a dll which has undefined symbols, in which case not - # even a static library is built. For now, we need to specify - # -no-undefined on the libtool link line when we can be certain - # that all symbols are satisfied, otherwise we get a static library. - allow_undefined=yes - ;; - *) - allow_undefined=yes - ;; - esac - libtool_args=$nonopt - base_compile="$nonopt $@" - compile_command=$nonopt - finalize_command=$nonopt - - compile_rpath= - finalize_rpath= - compile_shlibpath= - finalize_shlibpath= - convenience= - old_convenience= - deplibs= - old_deplibs= - compiler_flags= - linker_flags= - dllsearchpath= - lib_search_path=`pwd` - inst_prefix_dir= - new_inherited_linker_flags= - - avoid_version=no - dlfiles= - dlprefiles= - dlself=no - export_dynamic=no - export_symbols= - export_symbols_regex= - generated= - libobjs= - ltlibs= - module=no - no_install=no - objs= - non_pic_objects= - precious_files_regex= - prefer_static_libs=no - preload=no - prev= - prevarg= - release= - rpath= - xrpath= - perm_rpath= - temp_rpath= - thread_safe=no - vinfo= - vinfo_number=no - weak_libs= - single_module="${wl}-single_module" - func_infer_tag $base_compile - - # We need to know -static, to get the right output filenames. 
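# A hedged example (not from ltmain.sh) of why the allow_undefined note above
# matters on Windows targets: DLL builds conventionally pass -no-undefined on
# the libtool link line.  libfoo, its objects and the rpath are placeholders:
libtool --mode=link gcc -no-undefined -o libfoo.la foo.lo bar.lo \
  -rpath /usr/local/lib -version-info 1:0:0
# without -no-undefined (and with unresolved symbols present) libtool will
# typically produce only the static archive on these hosts, per the note above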
- for arg - do - case $arg in - -shared) - test "$build_libtool_libs" != yes && \ - func_fatal_configuration "can not build a shared library" - build_old_libs=no - break - ;; - -all-static | -static | -static-libtool-libs) - case $arg in - -all-static) - if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then - func_warning "complete static linking is impossible in this configuration" - fi - if test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=yes - ;; - -static) - if test -z "$pic_flag" && test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=built - ;; - -static-libtool-libs) - if test -z "$pic_flag" && test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=yes - ;; - esac - build_libtool_libs=no - build_old_libs=yes - break - ;; - esac - done - - # See if our shared archives depend on static archives. - test -n "$old_archive_from_new_cmds" && build_old_libs=yes - - # Go through the arguments, transforming them on the way. - while test "$#" -gt 0; do - arg="$1" - shift - func_quote_for_eval "$arg" - qarg=$func_quote_for_eval_unquoted_result - func_append libtool_args " $func_quote_for_eval_result" - - # If the previous option needs an argument, assign it. - if test -n "$prev"; then - case $prev in - output) - func_append compile_command " @OUTPUT@" - func_append finalize_command " @OUTPUT@" - ;; - esac - - case $prev in - dlfiles|dlprefiles) - if test "$preload" = no; then - # Add the symbol object into the linking commands. - func_append compile_command " @SYMFILE@" - func_append finalize_command " @SYMFILE@" - preload=yes - fi - case $arg in - *.la | *.lo) ;; # We handle these cases below. - force) - if test "$dlself" = no; then - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - self) - if test "$prev" = dlprefiles; then - dlself=yes - elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then - dlself=yes - else - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - *) - if test "$prev" = dlfiles; then - dlfiles="$dlfiles $arg" - else - dlprefiles="$dlprefiles $arg" - fi - prev= - continue - ;; - esac - ;; - expsyms) - export_symbols="$arg" - test -f "$arg" \ - || func_fatal_error "symbol file \`$arg' does not exist" - prev= - continue - ;; - expsyms_regex) - export_symbols_regex="$arg" - prev= - continue - ;; - framework) - case $host in - *-*-darwin*) - case "$deplibs " in - *" $qarg.ltframework "*) ;; - *) deplibs="$deplibs $qarg.ltframework" # this is fixed later - ;; - esac - ;; - esac - prev= - continue - ;; - inst_prefix) - inst_prefix_dir="$arg" - prev= - continue - ;; - objectlist) - if test -f "$arg"; then - save_arg=$arg - moreargs= - for fil in `cat "$save_arg"` - do -# moreargs="$moreargs $fil" - arg=$fil - # A libtool-controlled object. - - # Check to see that this really is a libtool object. - if func_lalib_unsafe_p "$arg"; then - pic_object= - non_pic_object= - - # Read the .lo file - func_source "$arg" - - if test -z "$pic_object" || - test -z "$non_pic_object" || - test "$pic_object" = none && - test "$non_pic_object" = none; then - func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" - xdir="$func_dirname_result" - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. 
- pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - func_append libobjs " $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. - if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" - xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result - non_pic_object=$xdir$func_lo2o_result - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else - func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - done - else - func_fatal_error "link input file \`$arg' does not exist" - fi - arg=$save_arg - prev= - continue - ;; - precious_regex) - precious_files_regex="$arg" - prev= - continue - ;; - release) - release="-$arg" - prev= - continue - ;; - rpath | xrpath) - # We need an absolute path. - case $arg in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - func_fatal_error "only absolute run-paths are allowed" - ;; - esac - if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; - *) rpath="$rpath $arg" ;; - esac - else - case "$xrpath " in - *" $arg "*) ;; - *) xrpath="$xrpath $arg" ;; - esac - fi - prev= - continue - ;; - shrext) - shrext_cmds="$arg" - prev= - continue - ;; - weak) - weak_libs="$weak_libs $arg" - prev= - continue - ;; - xcclinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $qarg" - prev= - func_append compile_command " $qarg" - func_append finalize_command " $qarg" - continue - ;; - xcompiler) - compiler_flags="$compiler_flags $qarg" - prev= - func_append compile_command " $qarg" - func_append finalize_command " $qarg" - continue - ;; - xlinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $wl$qarg" - prev= - func_append compile_command " $wl$qarg" - func_append finalize_command " $wl$qarg" - continue - ;; - *) - eval "$prev=\"\$arg\"" - prev= - continue - ;; - esac - fi # test -n "$prev" - - prevarg="$arg" - - case $arg in - -all-static) - if test -n "$link_static_flag"; then - # See comment for -static flag below, for more details. - func_append compile_command " $link_static_flag" - func_append finalize_command " $link_static_flag" - fi - continue - ;; - - -allow-undefined) - # FIXME: remove this flag sometime in the future. 
- func_fatal_error "\`-allow-undefined' must not be used because it is the default" - ;; - - -avoid-version) - avoid_version=yes - continue - ;; - - -dlopen) - prev=dlfiles - continue - ;; - - -dlpreopen) - prev=dlprefiles - continue - ;; - - -export-dynamic) - export_dynamic=yes - continue - ;; - - -export-symbols | -export-symbols-regex) - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - func_fatal_error "more than one -exported-symbols argument is not allowed" - fi - if test "X$arg" = "X-export-symbols"; then - prev=expsyms - else - prev=expsyms_regex - fi - continue - ;; - - -framework) - prev=framework - continue - ;; - - -inst-prefix-dir) - prev=inst_prefix - continue - ;; - - # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* - # so, if we see these flags be careful not to treat them like -L - -L[A-Z][A-Z]*:*) - case $with_gcc/$host in - no/*-*-irix* | /*-*-irix*) - func_append compile_command " $arg" - func_append finalize_command " $arg" - ;; - esac - continue - ;; - - -L*) - func_stripname '-L' '' "$arg" - dir=$func_stripname_result - if test -z "$dir"; then - if test "$#" -gt 0; then - func_fatal_error "require no space between \`-L' and \`$1'" - else - func_fatal_error "need path for \`-L' option" - fi - fi - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - absdir=`cd "$dir" && pwd` - test -z "$absdir" && \ - func_fatal_error "cannot determine absolute directory name of \`$dir'" - dir="$absdir" - ;; - esac - case "$deplibs " in - *" -L$dir "*) ;; - *) - deplibs="$deplibs -L$dir" - lib_search_path="$lib_search_path $dir" - ;; - esac - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) - testbindir=`$ECHO "X$dir" | $Xsed -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$dir:"*) ;; - ::) dllsearchpath=$dir;; - *) dllsearchpath="$dllsearchpath:$dir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - ::) dllsearchpath=$testbindir;; - *) dllsearchpath="$dllsearchpath:$testbindir";; - esac - ;; - esac - continue - ;; - - -l*) - if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc*) - # These systems don't actually have a C or math library (as such) - continue - ;; - *-*-os2*) - # These systems don't actually have a C library (as such) - test "X$arg" = "X-lc" && continue - ;; - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. - test "X$arg" = "X-lc" && continue - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework - deplibs="$deplibs System.ltframework" - continue - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype - test "X$arg" = "X-lc" && continue - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work - test "X$arg" = "X-lc" && continue - ;; - esac - elif test "X$arg" = "X-lc_r"; then - case $host in - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc_r directly, use -pthread flag. - continue - ;; - esac - fi - deplibs="$deplibs $arg" - continue - ;; - - -module) - module=yes - continue - ;; - - # Tru64 UNIX uses -model [arg] to determine the layout of C++ - # classes, name mangling, and exception handling. - # Darwin uses the -arch flag to determine output architecture. 
- -model|-arch|-isysroot) - compiler_flags="$compiler_flags $arg" - func_append compile_command " $arg" - func_append finalize_command " $arg" - prev=xcompiler - continue - ;; - - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) - compiler_flags="$compiler_flags $arg" - func_append compile_command " $arg" - func_append finalize_command " $arg" - case "$new_inherited_linker_flags " in - *" $arg "*) ;; - * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;; - esac - continue - ;; - - -multi_module) - single_module="${wl}-multi_module" - continue - ;; - - -no-fast-install) - fast_install=no - continue - ;; - - -no-install) - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) - # The PATH hackery in wrapper scripts is required on Windows - # and Darwin in order for the loader to find any dlls it needs. - func_warning "\`-no-install' is ignored for $host" - func_warning "assuming \`-no-fast-install' instead" - fast_install=no - ;; - *) no_install=yes ;; - esac - continue - ;; - - -no-undefined) - allow_undefined=no - continue - ;; - - -objectlist) - prev=objectlist - continue - ;; - - -o) prev=output ;; - - -precious-files-regex) - prev=precious_regex - continue - ;; - - -release) - prev=release - continue - ;; - - -rpath) - prev=rpath - continue - ;; - - -R) - prev=xrpath - continue - ;; - - -R*) - func_stripname '-R' '' "$arg" - dir=$func_stripname_result - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - func_fatal_error "only absolute run-paths are allowed" - ;; - esac - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - continue - ;; - - -shared) - # The effects of -shared are defined in a previous loop. - continue - ;; - - -shrext) - prev=shrext - continue - ;; - - -static | -static-libtool-libs) - # The effects of -static are defined in a previous loop. - # We used to do the same as -all-static on platforms that - # didn't have a PIC flag, but the assumption that the effects - # would be equivalent was wrong. It would break on at least - # Digital Unix and AIX. 
- continue - ;; - - -thread-safe) - thread_safe=yes - continue - ;; - - -version-info) - prev=vinfo - continue - ;; - - -version-number) - prev=vinfo - vinfo_number=yes - continue - ;; - - -weak) - prev=weak - continue - ;; - - -Wc,*) - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - func_quote_for_eval "$flag" - arg="$arg $wl$func_quote_for_eval_result" - compiler_flags="$compiler_flags $func_quote_for_eval_result" - done - IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; - - -Wl,*) - func_stripname '-Wl,' '' "$arg" - args=$func_stripname_result - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - func_quote_for_eval "$flag" - arg="$arg $wl$func_quote_for_eval_result" - compiler_flags="$compiler_flags $wl$func_quote_for_eval_result" - linker_flags="$linker_flags $func_quote_for_eval_result" - done - IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; - - -Xcompiler) - prev=xcompiler - continue - ;; - - -Xlinker) - prev=xlinker - continue - ;; - - -XCClinker) - prev=xcclinker - continue - ;; - - # -msg_* for osf cc - -msg_*) - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - ;; - - # -64, -mips[0-9] enable 64-bit mode on the SGI compiler - # -r[0-9][0-9]* specifies the processor on the SGI compiler - # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler - # +DA*, +DD* enable 64-bit mode on the HP compiler - # -q* pass through compiler args for the IBM compiler - # -m*, -t[45]*, -txscale* pass through architecture-specific - # compiler args for GCC - # -F/path gives path to uninstalled frameworks, gcc on darwin - # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC - # @file GCC response files - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ - -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*) - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - func_append compile_command " $arg" - func_append finalize_command " $arg" - compiler_flags="$compiler_flags $arg" - continue - ;; - - # Some other compiler flag. - -* | +*) - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - ;; - - *.$objext) - # A standard object. - objs="$objs $arg" - ;; - - *.lo) - # A libtool-controlled object. - - # Check to see that this really is a libtool object. - if func_lalib_unsafe_p "$arg"; then - pic_object= - non_pic_object= - - # Read the .lo file - func_source "$arg" - - if test -z "$pic_object" || - test -z "$non_pic_object" || - test "$pic_object" = none && - test "$non_pic_object" = none; then - func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" - xdir="$func_dirname_result" - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. - pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - func_append libobjs " $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. 
- if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" - xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result - non_pic_object=$xdir$func_lo2o_result - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else - func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - ;; - - *.$libext) - # An archive. - deplibs="$deplibs $arg" - old_deplibs="$old_deplibs $arg" - continue - ;; - - *.la) - # A libtool-controlled library. - - if test "$prev" = dlfiles; then - # This library was specified with -dlopen. - dlfiles="$dlfiles $arg" - prev= - elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. - dlprefiles="$dlprefiles $arg" - prev= - else - deplibs="$deplibs $arg" - fi - continue - ;; - - # Some other compiler argument. - *) - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - ;; - esac # arg - - # Now actually substitute the argument into the commands. - if test -n "$arg"; then - func_append compile_command " $arg" - func_append finalize_command " $arg" - fi - done # argument parsing loop - - test -n "$prev" && \ - func_fatal_help "the \`$prevarg' option requires an argument" - - if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then - eval arg=\"$export_dynamic_flag_spec\" - func_append compile_command " $arg" - func_append finalize_command " $arg" - fi - - oldlibs= - # calculate the name of the file, without its directory - func_basename "$output" - outputname="$func_basename_result" - libobjs_save="$libobjs" - - if test -n "$shlibpath_var"; then - # get the directories listed in $shlibpath_var - eval shlib_search_path=\`\$ECHO \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\` - else - shlib_search_path= - fi - eval sys_lib_search_path=\"$sys_lib_search_path_spec\" - eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - - func_dirname "$output" "/" "" - output_objdir="$func_dirname_result$objdir" - # Create the object directory. - func_mkdir_p "$output_objdir" - - # Determine the type of output - case $output in - "") - func_fatal_help "you must specify an output file" - ;; - *.$libext) linkmode=oldlib ;; - *.lo | *.$objext) linkmode=obj ;; - *.la) linkmode=lib ;; - *) linkmode=prog ;; # Anything else should be a program. - esac - - specialdeplibs= - - libs= - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. 
-la -lb -la) - for deplib in $deplibs; do - if $opt_duplicate_deps ; then - case "$libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - libs="$libs $deplib" - done - - if test "$linkmode" = lib; then - libs="$predeps $libs $compiler_lib_search_path $postdeps" - - # Compute libraries that are listed more than once in $predeps - # $postdeps and mark them as special (i.e., whose duplicates are - # not to be eliminated). - pre_post_deps= - if $opt_duplicate_compiler_generated_deps; then - for pre_post_dep in $predeps $postdeps; do - case "$pre_post_deps " in - *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; - esac - pre_post_deps="$pre_post_deps $pre_post_dep" - done - fi - pre_post_deps= - fi - - deplibs= - newdependency_libs= - newlib_search_path= - need_relink=no # whether we're linking any uninstalled libtool libraries - notinst_deplibs= # not-installed libtool libraries - notinst_path= # paths that contain not-installed libtool libraries - - case $linkmode in - lib) - passes="conv dlpreopen link" - for file in $dlfiles $dlprefiles; do - case $file in - *.la) ;; - *) - func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" - ;; - esac - done - ;; - prog) - compile_deplibs= - finalize_deplibs= - alldeplibs=no - newdlfiles= - newdlprefiles= - passes="conv scan dlopen dlpreopen link" - ;; - *) passes="conv" - ;; - esac - - for pass in $passes; do - # The preopen pass in lib mode reverses $deplibs; put it back here - # so that -L comes before libs that need it for instance... - if test "$linkmode,$pass" = "lib,link"; then - ## FIXME: Find the place where the list is rebuilt in the wrong - ## order, and fix it there properly - tmp_deplibs= - for deplib in $deplibs; do - tmp_deplibs="$deplib $tmp_deplibs" - done - deplibs="$tmp_deplibs" - fi - - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan"; then - libs="$deplibs" - deplibs= - fi - if test "$linkmode" = prog; then - case $pass in - dlopen) libs="$dlfiles" ;; - dlpreopen) libs="$dlprefiles" ;; - link) - libs="$deplibs %DEPLIBS%" - test "X$link_all_deplibs" != Xno && libs="$libs $dependency_libs" - ;; - esac - fi - if test "$linkmode,$pass" = "lib,dlpreopen"; then - # Collect and forward deplibs of preopened libtool libs - for lib in $dlprefiles; do - # Ignore non-libtool-libs - dependency_libs= - case $lib in - *.la) func_source "$lib" ;; - esac - - # Collect preopened libtool deplibs, except any this library - # has declared as weak libs - for deplib in $dependency_libs; do - deplib_base=`$ECHO "X$deplib" | $Xsed -e "$basename"` - case " $weak_libs " in - *" $deplib_base "*) ;; - *) deplibs="$deplibs $deplib" ;; - esac - done - done - libs="$dlprefiles" - fi - if test "$pass" = dlopen; then - # Collect dlpreopened libraries - save_deplibs="$deplibs" - deplibs= - fi - - for deplib in $libs; do - lib= - found=no - case $deplib in - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - compiler_flags="$compiler_flags $deplib" - if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; - esac - fi - fi - continue - ;; - -l*) - if test "$linkmode" != lib && test "$linkmode" != prog; then - func_warning "\`-l' is ignored for archives/objects" - continue - fi - 
func_stripname '-l' '' "$deplib" - name=$func_stripname_result - if test "$linkmode" = lib; then - searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" - else - searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" - fi - for searchdir in $searchdirs; do - for search_ext in .la $std_shrext .so .a; do - # Search the libtool library - lib="$searchdir/lib${name}${search_ext}" - if test -f "$lib"; then - if test "$search_ext" = ".la"; then - found=yes - else - found=no - fi - break 2 - fi - done - done - if test "$found" != yes; then - # deplib doesn't seem to be a libtool library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - else # deplib is a libtool library - # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, - # We need to do some special things here, and not later. - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $deplib "*) - if func_lalib_p "$lib"; then - library_names= - old_library= - func_source "$lib" - for l in $old_library $library_names; do - ll="$l" - done - if test "X$ll" = "X$old_library" ; then # only static version available - found=no - func_dirname "$lib" "" "." - ladir="$func_dirname_result" - lib=$ladir/$old_library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - fi - fi - ;; - *) ;; - esac - fi - fi - ;; # -l - *.ltframework) - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; - esac - fi - fi - continue - ;; - -L*) - case $linkmode in - lib) - deplibs="$deplib $deplibs" - test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - func_stripname '-L' '' "$deplib" - newlib_search_path="$newlib_search_path $func_stripname_result" - ;; - prog) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - if test "$pass" = scan; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - func_stripname '-L' '' "$deplib" - newlib_search_path="$newlib_search_path $func_stripname_result" - ;; - *) - func_warning "\`-L' is ignored for archives/objects" - ;; - esac # linkmode - continue - ;; # -L - -R*) - if test "$pass" = link; then - func_stripname '-R' '' "$deplib" - dir=$func_stripname_result - # Make sure the xrpath contains only unique directories. - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - fi - deplibs="$deplib $deplibs" - continue - ;; - *.la) lib="$deplib" ;; - *.$libext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - case $linkmode in - lib) - # Linking convenience modules into shared libraries is allowed, - # but linking other static libraries is non-portable. 
- case " $dlpreconveniencelibs " in - *" $deplib "*) ;; - *) - valid_a_lib=no - case $deplibs_check_method in - match_pattern*) - set dummy $deplibs_check_method; shift - match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` - if eval "\$ECHO \"X$deplib\"" 2>/dev/null | $Xsed -e 10q \ - | $EGREP "$match_pattern_regex" > /dev/null; then - valid_a_lib=yes - fi - ;; - pass_all) - valid_a_lib=yes - ;; - esac - if test "$valid_a_lib" != yes; then - $ECHO - $ECHO "*** Warning: Trying to link with static lib archive $deplib." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. But I can only do this if you have a" - $ECHO "*** shared version of the library, which you do not appear to have" - $ECHO "*** because the file extensions .$libext of this argument makes me believe" - $ECHO "*** that it is just a static archive that I should not use here." - else - $ECHO - $ECHO "*** Warning: Linking the shared library $output against the" - $ECHO "*** static library $deplib is not portable!" - deplibs="$deplib $deplibs" - fi - ;; - esac - continue - ;; - prog) - if test "$pass" != link; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - continue - ;; - esac # linkmode - ;; # *.$libext - *.lo | *.$objext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - elif test "$linkmode" = prog; then - if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. - newdlprefiles="$newdlprefiles $deplib" - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - newdlfiles="$newdlfiles $deplib" - fi - fi - continue - ;; - %DEPLIBS%) - alldeplibs=yes - continue - ;; - esac # case $deplib - - if test "$found" = yes || test -f "$lib"; then : - else - func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" - fi - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$lib" \ - || func_fatal_error "\`$lib' is not a valid libtool archive" - - func_dirname "$lib" "" "." 
- ladir="$func_dirname_result" - - dlname= - dlopen= - dlpreopen= - libdir= - library_names= - old_library= - inherited_linker_flags= - # If the library was installed with an old release of libtool, - # it will not redefine variables installed, or shouldnotlink - installed=yes - shouldnotlink=no - avoidtemprpath= - - - # Read the .la file - func_source "$lib" - - # Convert "-framework foo" to "foo.ltframework" - if test -n "$inherited_linker_flags"; then - tmp_inherited_linker_flags=`$ECHO "X$inherited_linker_flags" | $Xsed -e 's/-framework \([^ $]*\)/\1.ltframework/g'` - for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do - case " $new_inherited_linker_flags " in - *" $tmp_inherited_linker_flag "*) ;; - *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";; - esac - done - fi - dependency_libs=`$ECHO "X $dependency_libs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan" || - { test "$linkmode" != prog && test "$linkmode" != lib; }; then - test -n "$dlopen" && dlfiles="$dlfiles $dlopen" - test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" - fi - - if test "$pass" = conv; then - # Only check for convenience libraries - deplibs="$lib $deplibs" - if test -z "$libdir"; then - if test -z "$old_library"; then - func_fatal_error "cannot find name of link library for \`$lib'" - fi - # It is a libtool convenience library, so add in its objects. - convenience="$convenience $ladir/$objdir/$old_library" - old_convenience="$old_convenience $ladir/$objdir/$old_library" - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" - if $opt_duplicate_deps ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done - elif test "$linkmode" != prog && test "$linkmode" != lib; then - func_fatal_error "\`$lib' is not a convenience library" - fi - continue - fi # $pass = conv - - - # Get the name of the library we link against. - linklib= - for l in $old_library $library_names; do - linklib="$l" - done - if test -z "$linklib"; then - func_fatal_error "cannot find name of link library for \`$lib'" - fi - - # This library was specified with -dlopen. - if test "$pass" = dlopen; then - if test -z "$libdir"; then - func_fatal_error "cannot -dlopen a convenience library: \`$lib'" - fi - if test -z "$dlname" || - test "$dlopen_support" != yes || - test "$build_libtool_libs" = no; then - # If there is no dlname, no dlopen support or we're linking - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't - # bomb out in the load deplibs phase. - dlprefiles="$dlprefiles $lib $dependency_libs" - else - newdlfiles="$newdlfiles $lib" - fi - continue - fi # $pass = dlopen - - # We need an absolute path. - case $ladir in - [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; - *) - abs_ladir=`cd "$ladir" && pwd` - if test -z "$abs_ladir"; then - func_warning "cannot determine absolute directory name of \`$ladir'" - func_warning "passing it literally to the linker, although it might fail" - abs_ladir="$ladir" - fi - ;; - esac - func_basename "$lib" - laname="$func_basename_result" - - # Find the relevant object directory and library name. - if test "X$installed" = Xyes; then - if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then - func_warning "library \`$lib' was moved." 
- dir="$ladir" - absdir="$abs_ladir" - libdir="$abs_ladir" - else - dir="$libdir" - absdir="$libdir" - fi - test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else - if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then - dir="$ladir" - absdir="$abs_ladir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - else - dir="$ladir/$objdir" - absdir="$abs_ladir/$objdir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - fi - fi # $installed = yes - func_stripname 'lib' '.la' "$laname" - name=$func_stripname_result - - # This library was specified with -dlpreopen. - if test "$pass" = dlpreopen; then - if test -z "$libdir" && test "$linkmode" = prog; then - func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" - fi - # Prefer using a static library (so that no silly _DYNAMIC symbols - # are required to link). - if test -n "$old_library"; then - newdlprefiles="$newdlprefiles $dir/$old_library" - # Keep a list of preopened convenience libraries to check - # that they are being used correctly in the link pass. - test -z "$libdir" && \ - dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library" - # Otherwise, use the dlname, so that lt_dlopen finds it. - elif test -n "$dlname"; then - newdlprefiles="$newdlprefiles $dir/$dlname" - else - newdlprefiles="$newdlprefiles $dir/$linklib" - fi - fi # $pass = dlpreopen - - if test -z "$libdir"; then - # Link the convenience library - if test "$linkmode" = lib; then - deplibs="$dir/$old_library $deplibs" - elif test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$dir/$old_library $compile_deplibs" - finalize_deplibs="$dir/$old_library $finalize_deplibs" - else - deplibs="$lib $deplibs" # used for prog,scan pass - fi - continue - fi - - - if test "$linkmode" = prog && test "$pass" != link; then - newlib_search_path="$newlib_search_path $ladir" - deplibs="$lib $deplibs" - - linkalldeplibs=no - if test "$link_all_deplibs" != no || test -z "$library_names" || - test "$build_libtool_libs" = no; then - linkalldeplibs=yes - fi - - tmp_libs= - for deplib in $dependency_libs; do - case $deplib in - -L*) func_stripname '-L' '' "$deplib" - newlib_search_path="$newlib_search_path $func_stripname_result" - ;; - esac - # Need to link against all dependency_libs? - if test "$linkalldeplibs" = yes; then - deplibs="$deplib $deplibs" - else - # Need to hardcode shared library paths - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi - if $opt_duplicate_deps ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done # for deplib - continue - fi # $linkmode = prog... - - if test "$linkmode,$pass" = "prog,link"; then - if test -n "$library_names" && - { { test "$prefer_static_libs" = no || - test "$prefer_static_libs,$installed" = "built,yes"; } || - test -z "$old_library"; }; then - # We need to hardcode the library path - if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then - # Make sure the rpath contains only unique directories. - case "$temp_rpath:" in - *"$absdir:"*) ;; - *) temp_rpath="$temp_rpath$absdir:" ;; - esac - fi - - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. 
- case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi # $linkmode,$pass = prog,link... - - if test "$alldeplibs" = yes && - { test "$deplibs_check_method" = pass_all || - { test "$build_libtool_libs" = yes && - test -n "$library_names"; }; }; then - # We only need to search for static libraries - continue - fi - fi - - link_static=no # Whether the deplib will be linked statically - use_static_libs=$prefer_static_libs - if test "$use_static_libs" = built && test "$installed" = yes; then - use_static_libs=no - fi - if test -n "$library_names" && - { test "$use_static_libs" = no || test -z "$old_library"; }; then - case $host in - *cygwin* | *mingw* | *cegcc*) - # No point in relinking DLLs because paths are not encoded - notinst_deplibs="$notinst_deplibs $lib" - need_relink=no - ;; - *) - if test "$installed" = no; then - notinst_deplibs="$notinst_deplibs $lib" - need_relink=yes - fi - ;; - esac - # This is a shared library - - # Warn about portability, can't link against -module's on some - # systems (darwin). Don't bleat about dlopened modules though! - dlopenmodule="" - for dlpremoduletest in $dlprefiles; do - if test "X$dlpremoduletest" = "X$lib"; then - dlopenmodule="$dlpremoduletest" - break - fi - done - if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then - $ECHO - if test "$linkmode" = prog; then - $ECHO "*** Warning: Linking the executable $output against the loadable module" - else - $ECHO "*** Warning: Linking the shared library $output against the loadable module" - fi - $ECHO "*** $linklib is not portable!" - fi - if test "$linkmode" = lib && - test "$hardcode_into_libs" = yes; then - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. - case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi - - if test -n "$old_archive_from_expsyms_cmds"; then - # figure out the soname - set dummy $library_names - shift - realname="$1" - shift - libname=`eval "\\$ECHO \"$libname_spec\""` - # use dlname if we got it. it's perfectly good, no? - if test -n "$dlname"; then - soname="$dlname" - elif test -n "$soname_spec"; then - # bleh windows - case $host in - *cygwin* | mingw* | *cegcc*) - func_arith $current - $age - major=$func_arith_result - versuffix="-$major" - ;; - esac - eval soname=\"$soname_spec\" - else - soname="$realname" - fi - - # Make a new name for the extract_expsyms_cmds to use - soroot="$soname" - func_basename "$soroot" - soname="$func_basename_result" - func_stripname 'lib' '.dll' "$soname" - newlib=libimp-$func_stripname_result.a - - # If the library has no export list, then create one now - if test -f "$output_objdir/$soname-def"; then : - else - func_verbose "extracting exported symbol list from \`$soname'" - func_execute_cmds "$extract_expsyms_cmds" 'exit $?' 
- fi - - # Create $newlib - if test -f "$output_objdir/$newlib"; then :; else - func_verbose "generating import library for \`$soname'" - func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' - fi - # make sure the library variables are pointing to the new library - dir=$output_objdir - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - - if test "$linkmode" = prog || test "$mode" != relink; then - add_shlibpath= - add_dir= - add= - lib_linked=yes - case $hardcode_action in - immediate | unsupported) - if test "$hardcode_direct" = no; then - add="$dir/$linklib" - case $host in - *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; - *-*-sysv4*uw2*) add_dir="-L$dir" ;; - *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ - *-*-unixware7*) add_dir="-L$dir" ;; - *-*-darwin* ) - # if the lib is a (non-dlopened) module then we can not - # link against it, someone is ignoring the earlier warnings - if /usr/bin/file -L $add 2> /dev/null | - $GREP ": [^:]* bundle" >/dev/null ; then - if test "X$dlopenmodule" != "X$lib"; then - $ECHO "*** Warning: lib $linklib is a module, not a shared library" - if test -z "$old_library" ; then - $ECHO - $ECHO "*** And there doesn't seem to be a static archive available" - $ECHO "*** The link will probably fail, sorry" - else - add="$dir/$old_library" - fi - elif test -n "$old_library"; then - add="$dir/$old_library" - fi - fi - esac - elif test "$hardcode_minus_L" = no; then - case $host in - *-*-sunos*) add_shlibpath="$dir" ;; - esac - add_dir="-L$dir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = no; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - relink) - if test "$hardcode_direct" = yes && - test "$hardcode_direct_absolute" = no; then - add="$dir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$dir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - *) lib_linked=no ;; - esac - - if test "$lib_linked" != yes; then - func_fatal_configuration "unsupported hardcode properties" - fi - - if test -n "$add_shlibpath"; then - case :$compile_shlibpath: in - *":$add_shlibpath:"*) ;; - *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; - esac - fi - if test "$linkmode" = prog; then - test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" - test -n "$add" && compile_deplibs="$add $compile_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - if test "$hardcode_direct" != yes && - test "$hardcode_minus_L" != yes && - test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - fi - fi - fi - - if test "$linkmode" = prog || test "$mode" = relink; then - add_shlibpath= - add_dir= - add= - # Finalize command for both is simple: just hardcode it. 
- if test "$hardcode_direct" = yes && - test "$hardcode_direct_absolute" = no; then - add="$libdir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$libdir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - add="-l$name" - elif test "$hardcode_automatic" = yes; then - if test -n "$inst_prefix_dir" && - test -f "$inst_prefix_dir$libdir/$linklib" ; then - add="$inst_prefix_dir$libdir/$linklib" - else - add="$libdir/$linklib" - fi - else - # We cannot seem to hardcode it, guess we'll fake it. - add_dir="-L$libdir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - fi - - if test "$linkmode" = prog; then - test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" - test -n "$add" && finalize_deplibs="$add $finalize_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - fi - fi - elif test "$linkmode" = prog; then - # Here we assume that one of hardcode_direct or hardcode_minus_L - # is not unsupported. This is valid on all known static and - # shared platforms. - if test "$hardcode_direct" != unsupported; then - test -n "$old_library" && linklib="$old_library" - compile_deplibs="$dir/$linklib $compile_deplibs" - finalize_deplibs="$dir/$linklib $finalize_deplibs" - else - compile_deplibs="-l$name -L$dir $compile_deplibs" - finalize_deplibs="-l$name -L$dir $finalize_deplibs" - fi - elif test "$build_libtool_libs" = yes; then - # Not a shared library - if test "$deplibs_check_method" != pass_all; then - # We're trying link a shared library against a static one - # but the system doesn't support it. - - # Just print a warning and add the library to dependency_libs so - # that the program can be linked against the static library. - $ECHO - $ECHO "*** Warning: This system can not link to static lib archive $lib." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. But I can only do this if you have a" - $ECHO "*** shared version of the library, which you do not appear to have." - if test "$module" = yes; then - $ECHO "*** But as you try to build a module library, libtool will still create " - $ECHO "*** a static module, that should work as long as the dlopening application" - $ECHO "*** is linked with the -dlopen flag to resolve symbols at runtime." - if test -z "$global_symbol_pipe"; then - $ECHO - $ECHO "*** However, this would only work if libtool was able to extract symbol" - $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could" - $ECHO "*** not find such a program. So, this module is probably useless." - $ECHO "*** \`nm' from GNU binutils and a full rebuild may help." - fi - if test "$build_old_libs" = no; then - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - fi - else - deplibs="$dir/$old_library $deplibs" - link_static=yes - fi - fi # link shared/static library? 
- - if test "$linkmode" = lib; then - if test -n "$dependency_libs" && - { test "$hardcode_into_libs" != yes || - test "$build_old_libs" = yes || - test "$link_static" = yes; }; then - # Extract -R from dependency_libs - temp_deplibs= - for libdir in $dependency_libs; do - case $libdir in - -R*) func_stripname '-R' '' "$libdir" - temp_xrpath=$func_stripname_result - case " $xrpath " in - *" $temp_xrpath "*) ;; - *) xrpath="$xrpath $temp_xrpath";; - esac;; - *) temp_deplibs="$temp_deplibs $libdir";; - esac - done - dependency_libs="$temp_deplibs" - fi - - newlib_search_path="$newlib_search_path $absdir" - # Link against this library - test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" - # ... and its dependency_libs - tmp_libs= - for deplib in $dependency_libs; do - newdependency_libs="$deplib $newdependency_libs" - if $opt_duplicate_deps ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done - - if test "$link_all_deplibs" != no; then - # Add the search paths of all dependency libraries - for deplib in $dependency_libs; do - path= - case $deplib in - -L*) path="$deplib" ;; - *.la) - func_dirname "$deplib" "" "." - dir="$func_dirname_result" - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then - func_warning "cannot determine absolute directory name of \`$dir'" - absdir="$dir" - fi - ;; - esac - if $GREP "^installed=no" $deplib > /dev/null; then - case $host in - *-*-darwin*) - depdepl= - eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` - if test -n "$deplibrary_names" ; then - for tmp in $deplibrary_names ; do - depdepl=$tmp - done - if test -f "$absdir/$objdir/$depdepl" ; then - depdepl="$absdir/$objdir/$depdepl" - darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - if test -z "$darwin_install_name"; then - darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - fi - compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" - linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}" - path= - fi - fi - ;; - *) - path="-L$absdir/$objdir" - ;; - esac - else - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - test -z "$libdir" && \ - func_fatal_error "\`$deplib' is not a valid libtool archive" - test "$absdir" != "$libdir" && \ - func_warning "\`$deplib' seems to be moved" - - path="-L$absdir" - fi - ;; - esac - case " $deplibs " in - *" $path "*) ;; - *) deplibs="$path $deplibs" ;; - esac - done - fi # link_all_deplibs != no - fi # linkmode = lib - done # for deplib in $libs - if test "$pass" = link; then - if test "$linkmode" = "prog"; then - compile_deplibs="$new_inherited_linker_flags $compile_deplibs" - finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" - else - compiler_flags="$compiler_flags "`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - fi - fi - dependency_libs="$newdependency_libs" - if test "$pass" = dlpreopen; then - # Link the dlpreopened libraries before other libraries - for deplib in $save_deplibs; do - deplibs="$deplib $deplibs" - done - fi - if test "$pass" != dlopen; then - if test "$pass" != conv; then - # Make sure lib_search_path contains only unique directories. 
- lib_search_path= - for dir in $newlib_search_path; do - case "$lib_search_path " in - *" $dir "*) ;; - *) lib_search_path="$lib_search_path $dir" ;; - esac - done - newlib_search_path= - fi - - if test "$linkmode,$pass" != "prog,link"; then - vars="deplibs" - else - vars="compile_deplibs finalize_deplibs" - fi - for var in $vars dependency_libs; do - # Add libraries to $var in reverse order - eval tmp_libs=\"\$$var\" - new_libs= - for deplib in $tmp_libs; do - # FIXME: Pedantically, this is the right thing to do, so - # that some nasty dependency loop isn't accidentally - # broken: - #new_libs="$deplib $new_libs" - # Pragmatically, this seems to cause very few problems in - # practice: - case $deplib in - -L*) new_libs="$deplib $new_libs" ;; - -R*) ;; - *) - # And here is the reason: when a library appears more - # than once as an explicit dependence of a library, or - # is implicitly linked in more than once by the - # compiler, it is considered special, and multiple - # occurrences thereof are not removed. Compare this - # with having the same library being listed as a - # dependency of multiple other libraries: in this case, - # we know (pedantically, we assume) the library does not - # need to be listed more than once, so we keep only the - # last copy. This is not always right, but it is rare - # enough that we require users that really mean to play - # such unportable linking tricks to link the library - # using -Wl,-lname, so that libtool does not consider it - # for duplicate removal. - case " $specialdeplibs " in - *" $deplib "*) new_libs="$deplib $new_libs" ;; - *) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$deplib $new_libs" ;; - esac - ;; - esac - ;; - esac - done - tmp_libs= - for deplib in $new_libs; do - case $deplib in - -L*) - case " $tmp_libs " in - *" $deplib "*) ;; - *) tmp_libs="$tmp_libs $deplib" ;; - esac - ;; - *) tmp_libs="$tmp_libs $deplib" ;; - esac - done - eval $var=\"$tmp_libs\" - done # for var - fi - # Last step: remove runtime libs from dependency_libs - # (they stay in deplibs) - tmp_libs= - for i in $dependency_libs ; do - case " $predeps $postdeps $compiler_lib_search_path " in - *" $i "*) - i="" - ;; - esac - if test -n "$i" ; then - tmp_libs="$tmp_libs $i" - fi - done - dependency_libs=$tmp_libs - done # for pass - if test "$linkmode" = prog; then - dlfiles="$newdlfiles" - fi - if test "$linkmode" = prog || test "$linkmode" = lib; then - dlprefiles="$newdlprefiles" - fi - - case $linkmode in - oldlib) - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - func_warning "\`-dlopen' is ignored for archives" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) - func_warning "\`-l' and \`-L' are ignored for archives" ;; - esac - - test -n "$rpath" && \ - func_warning "\`-rpath' is ignored for archives" - - test -n "$xrpath" && \ - func_warning "\`-R' is ignored for archives" - - test -n "$vinfo" && \ - func_warning "\`-version-info/-version-number' is ignored for archives" - - test -n "$release" && \ - func_warning "\`-release' is ignored for archives" - - test -n "$export_symbols$export_symbols_regex" && \ - func_warning "\`-export-symbols' is ignored for archives" - - # Now set the variables for building old libraries. - build_libtool_libs=no - oldlibs="$output" - objs="$objs$old_deplibs" - ;; - - lib) - # Make sure we only generate libraries of the form `libNAME.la'. 
- case $outputname in - lib*) - func_stripname 'lib' '.la' "$outputname" - name=$func_stripname_result - eval shared_ext=\"$shrext_cmds\" - eval libname=\"$libname_spec\" - ;; - *) - test "$module" = no && \ - func_fatal_help "libtool library \`$output' must begin with \`lib'" - - if test "$need_lib_prefix" != no; then - # Add the "lib" prefix for modules if required - func_stripname '' '.la' "$outputname" - name=$func_stripname_result - eval shared_ext=\"$shrext_cmds\" - eval libname=\"$libname_spec\" - else - func_stripname '' '.la' "$outputname" - libname=$func_stripname_result - fi - ;; - esac - - if test -n "$objs"; then - if test "$deplibs_check_method" != pass_all; then - func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" - else - $ECHO - $ECHO "*** Warning: Linking the shared library $output against the non-libtool" - $ECHO "*** objects $objs is not portable!" - libobjs="$libobjs $objs" - fi - fi - - test "$dlself" != no && \ - func_warning "\`-dlopen self' is ignored for libtool libraries" - - set dummy $rpath - shift - test "$#" -gt 1 && \ - func_warning "ignoring multiple \`-rpath's for a libtool library" - - install_libdir="$1" - - oldlibs= - if test -z "$rpath"; then - if test "$build_libtool_libs" = yes; then - # Building a libtool convenience library. - # Some compilers have problems with a `.al' extension so - # convenience libraries should have the same extension an - # archive normally would. - oldlibs="$output_objdir/$libname.$libext $oldlibs" - build_libtool_libs=convenience - build_old_libs=yes - fi - - test -n "$vinfo" && \ - func_warning "\`-version-info/-version-number' is ignored for convenience libraries" - - test -n "$release" && \ - func_warning "\`-release' is ignored for convenience libraries" - else - - # Parse the version information argument. - save_ifs="$IFS"; IFS=':' - set dummy $vinfo 0 0 0 - shift - IFS="$save_ifs" - - test -n "$7" && \ - func_fatal_help "too many parameters to \`-version-info'" - - # convert absolute version numbers to libtool ages - # this retains compatibility with .la files and attempts - # to make the code below a bit more comprehensible - - case $vinfo_number in - yes) - number_major="$1" - number_minor="$2" - number_revision="$3" - # - # There are really only two kinds -- those that - # use the current revision as the major version - # and those that subtract age and use age as - # a minor version. But, then there is irix - # which has an extra 1 added just for fun - # - case $version_type in - darwin|linux|osf|windows|none) - func_arith $number_major + $number_minor - current=$func_arith_result - age="$number_minor" - revision="$number_revision" - ;; - freebsd-aout|freebsd-elf|sunos) - current="$number_major" - revision="$number_minor" - age="0" - ;; - irix|nonstopux) - func_arith $number_major + $number_minor - current=$func_arith_result - age="$number_minor" - revision="$number_minor" - lt_irix_increment=no - ;; - *) - func_fatal_configuration "$modename: unknown library version type \`$version_type'" - ;; - esac - ;; - no) - current="$1" - revision="$2" - age="$3" - ;; - esac - - # Check that each of the things are valid numbers. 
- case $current in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - func_error "CURRENT \`$current' must be a nonnegative integer" - func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $revision in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - func_error "REVISION \`$revision' must be a nonnegative integer" - func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $age in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - func_error "AGE \`$age' must be a nonnegative integer" - func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - if test "$age" -gt "$current"; then - func_error "AGE \`$age' is greater than the current interface number \`$current'" - func_fatal_error "\`$vinfo' is not valid version information" - fi - - # Calculate the version variables. - major= - versuffix= - verstring= - case $version_type in - none) ;; - - darwin) - # Like Linux, but with the current version available in - # verstring for coding it into the library header - func_arith $current - $age - major=.$func_arith_result - versuffix="$major.$age.$revision" - # Darwin ld doesn't like 0 for these options... - func_arith $current + 1 - minor_current=$func_arith_result - xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" - verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" - ;; - - freebsd-aout) - major=".$current" - versuffix=".$current.$revision"; - ;; - - freebsd-elf) - major=".$current" - versuffix=".$current" - ;; - - irix | nonstopux) - if test "X$lt_irix_increment" = "Xno"; then - func_arith $current - $age - else - func_arith $current - $age + 1 - fi - major=$func_arith_result - - case $version_type in - nonstopux) verstring_prefix=nonstopux ;; - *) verstring_prefix=sgi ;; - esac - verstring="$verstring_prefix$major.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$revision - while test "$loop" -ne 0; do - func_arith $revision - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result - verstring="$verstring_prefix$major.$iface:$verstring" - done - - # Before this point, $major must not contain `.'. - major=.$major - versuffix="$major.$revision" - ;; - - linux) - func_arith $current - $age - major=.$func_arith_result - versuffix="$major.$age.$revision" - ;; - - osf) - func_arith $current - $age - major=.$func_arith_result - versuffix=".$current.$age.$revision" - verstring="$current.$age.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$age - while test "$loop" -ne 0; do - func_arith $current - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result - verstring="$verstring:${iface}.0" - done - - # Make executables depend on our current version. - verstring="$verstring:${current}.0" - ;; - - qnx) - major=".$current" - versuffix=".$current" - ;; - - sunos) - major=".$current" - versuffix=".$current.$revision" - ;; - - windows) - # Use '-' rather than '.', since we only want one - # extension on DOS 8.3 filesystems. - func_arith $current - $age - major=$func_arith_result - versuffix="-$major" - ;; - - *) - func_fatal_configuration "unknown library version type \`$version_type'" - ;; - esac - - # Clear the version info if we defaulted, and they specified a release. 
- if test -z "$vinfo" && test -n "$release"; then - major= - case $version_type in - darwin) - # we can't check for "0.0" in archive_cmds due to quoting - # problems, so we reset it completely - verstring= - ;; - *) - verstring="0.0" - ;; - esac - if test "$need_version" = no; then - versuffix= - else - versuffix=".0.0" - fi - fi - - # Remove version info from name if versioning should be avoided - if test "$avoid_version" = yes && test "$need_version" = no; then - major= - versuffix= - verstring="" - fi - - # Check to see if the archive will have undefined symbols. - if test "$allow_undefined" = yes; then - if test "$allow_undefined_flag" = unsupported; then - func_warning "undefined symbols not allowed in $host shared libraries" - build_libtool_libs=no - build_old_libs=yes - fi - else - # Don't allow undefined symbols. - allow_undefined_flag="$no_undefined_flag" - fi - - fi - - func_generate_dlsyms "$libname" "$libname" "yes" - libobjs="$libobjs $symfileobj" - test "X$libobjs" = "X " && libobjs= - - if test "$mode" != relink; then - # Remove our outputs, but don't remove object files since they - # may have been created when compiling PIC objects. - removelist= - tempremovelist=`$ECHO "$output_objdir/*"` - for p in $tempremovelist; do - case $p in - *.$objext | *.gcno) - ;; - $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) - if test "X$precious_files_regex" != "X"; then - if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 - then - continue - fi - fi - removelist="$removelist $p" - ;; - *) ;; - esac - done - test -n "$removelist" && \ - func_show_eval "${RM}r \$removelist" - fi - - # Now set the variables for building old libraries. - if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then - oldlibs="$oldlibs $output_objdir/$libname.$libext" - - # Transform .lo files to .o files. - oldobjs="$objs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP` - fi - - # Eliminate all temporary directories. - #for path in $notinst_path; do - # lib_search_path=`$ECHO "X$lib_search_path " | $Xsed -e "s% $path % %g"` - # deplibs=`$ECHO "X$deplibs " | $Xsed -e "s% -L$path % %g"` - # dependency_libs=`$ECHO "X$dependency_libs " | $Xsed -e "s% -L$path % %g"` - #done - - if test -n "$xrpath"; then - # If the user specified any rpath flags, then add them. - temp_xrpath= - for libdir in $xrpath; do - temp_xrpath="$temp_xrpath -R$libdir" - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" ;; - esac - done - if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then - dependency_libs="$temp_xrpath $dependency_libs" - fi - fi - - # Make sure dlfiles contains only unique files that won't be dlpreopened - old_dlfiles="$dlfiles" - dlfiles= - for lib in $old_dlfiles; do - case " $dlprefiles $dlfiles " in - *" $lib "*) ;; - *) dlfiles="$dlfiles $lib" ;; - esac - done - - # Make sure dlprefiles contains only unique files - old_dlprefiles="$dlprefiles" - dlprefiles= - for lib in $old_dlprefiles; do - case "$dlprefiles " in - *" $lib "*) ;; - *) dlprefiles="$dlprefiles $lib" ;; - esac - done - - if test "$build_libtool_libs" = yes; then - if test -n "$rpath"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc*) - # these systems don't actually have a c library (as such)! 
- ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C library is in the System framework - deplibs="$deplibs System.ltframework" - ;; - *-*-netbsd*) - # Don't link with libc until the a.out ld.so is fixed. - ;; - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work - ;; - *) - # Add libc to deplibs on all other systems if necessary. - if test "$build_libtool_need_lc" = "yes"; then - deplibs="$deplibs -lc" - fi - ;; - esac - fi - - # Transform deplibs into only deplibs that can be linked in shared. - name_save=$name - libname_save=$libname - release_save=$release - versuffix_save=$versuffix - major_save=$major - # I'm not sure if I'm treating the release correctly. I think - # release should show up in the -l (ie -lgmp5) so we don't want to - # add it in twice. Is that correct? - release="" - versuffix="" - major="" - newdeplibs= - droppeddeps=no - case $deplibs_check_method in - pass_all) - # Don't check for shared/static. Everything works. - # This might be a little naive. We might want to check - # whether the library exists or not. But this is on - # osf3 & osf4 and I'm not really sure... Just - # implementing what was already the behavior. - newdeplibs=$deplibs - ;; - test_compile) - # This code stresses the "libraries are programs" paradigm to its - # limits. Maybe even breaks it. We compile a program, linking it - # against the deplibs as a proxy for the library. Then we can check - # whether they linked in statically or dynamically with ldd. - $opt_dry_run || $RM conftest.c - cat > conftest.c <<EOF - int main() { return 0; } -EOF - $opt_dry_run || $RM conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then - ldd_output=`ldd conftest` - for i in $deplibs; do - case $i in - -l*) - func_stripname -l '' "$i" - name=$func_stripname_result - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - newdeplibs="$newdeplibs $i" - i="" - ;; - esac - fi - if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - newdeplibs="$newdeplibs $i" - else - droppeddeps=yes - $ECHO - $ECHO "*** Warning: dynamic linker does not accept needed library $i." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. But I can only do this if you have a" - $ECHO "*** shared version of the library, which I believe you do not have" - $ECHO "*** because a test_compile did reveal that the linker did not use it for" - $ECHO "*** its dynamic dependency list that programs get resolved with at runtime." - fi - fi - ;; - *) - newdeplibs="$newdeplibs $i" - ;; - esac - done - else - # Error occurred in the first compile. Let's try to salvage - # the situation: Compile a separate program for each library. 
- for i in $deplibs; do - case $i in - -l*) - func_stripname -l '' "$i" - name=$func_stripname_result - $opt_dry_run || $RM conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $i; then - ldd_output=`ldd conftest` - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - newdeplibs="$newdeplibs $i" - i="" - ;; - esac - fi - if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - newdeplibs="$newdeplibs $i" - else - droppeddeps=yes - $ECHO - $ECHO "*** Warning: dynamic linker does not accept needed library $i." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. But I can only do this if you have a" - $ECHO "*** shared version of the library, which you do not appear to have" - $ECHO "*** because a test_compile did reveal that the linker did not use this one" - $ECHO "*** as a dynamic dependency that programs can get resolved with at runtime." - fi - fi - else - droppeddeps=yes - $ECHO - $ECHO "*** Warning! Library $i is needed by this library but I was not able to" - $ECHO "*** make it link in! You will probably need to install it or some" - $ECHO "*** library that it depends on before this library will be fully" - $ECHO "*** functional. Installing it before continuing would be even better." - fi - ;; - *) - newdeplibs="$newdeplibs $i" - ;; - esac - done - fi - ;; - file_magic*) - set dummy $deplibs_check_method; shift - file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` - for a_deplib in $deplibs; do - case $a_deplib in - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do - # Follow soft links. - if ls -lLd "$potent_lib" 2>/dev/null | - $GREP " -> " >/dev/null; then - continue - fi - # The statement above tries to avoid entering an - # endless loop below, in case of cyclic links. - # We might still enter an endless loop, since a link - # loop can be closed while we follow links, - # but so what? - potlib="$potent_lib" - while test -h "$potlib" 2>/dev/null; do - potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` - case $potliblink in - [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; - *) potlib=`$ECHO "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";; - esac - done - if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | - $SED -e 10q | - $EGREP "$file_magic_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - break 2 - fi - done - done - fi - if test -n "$a_deplib" ; then - droppeddeps=yes - $ECHO - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. 
But I can only do this if you have a" - $ECHO "*** shared version of the library, which you do not appear to have" - $ECHO "*** because I did check the linker path looking for a file starting" - if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. (...for file magic test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" - $ECHO "*** using a file magic. Last file checked: $potlib" - fi - fi - ;; - *) - # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" - ;; - esac - done # Gone through all deplibs. - ;; - match_pattern*) - set dummy $deplibs_check_method; shift - match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` - for a_deplib in $deplibs; do - case $a_deplib in - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do - potlib="$potent_lib" # see symlink-check above in file_magic test - if eval "\$ECHO \"X$potent_lib\"" 2>/dev/null | $Xsed -e 10q | \ - $EGREP "$match_pattern_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - break 2 - fi - done - done - fi - if test -n "$a_deplib" ; then - droppeddeps=yes - $ECHO - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." - $ECHO "*** I have the capability to make that library automatically link in when" - $ECHO "*** you link to this library. But I can only do this if you have a" - $ECHO "*** shared version of the library, which you do not appear to have" - $ECHO "*** because I did check the linker path looking for a file starting" - if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" - $ECHO "*** using a regex pattern. Last file checked: $potlib" - fi - fi - ;; - *) - # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" - ;; - esac - done # Gone through all deplibs. - ;; - none | unknown | *) - newdeplibs="" - tmp_deplibs=`$ECHO "X $deplibs" | $Xsed \ - -e 's/ -lc$//' -e 's/ -[LR][^ ]*//g'` - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - for i in $predeps $postdeps ; do - # can't use Xsed below, because $i might contain '/' - tmp_deplibs=`$ECHO "X $tmp_deplibs" | $Xsed -e "s,$i,,"` - done - fi - if $ECHO "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' | - $GREP . >/dev/null; then - $ECHO - if test "X$deplibs_check_method" = "Xnone"; then - $ECHO "*** Warning: inter-library dependencies are not supported in this platform." - else - $ECHO "*** Warning: inter-library dependencies are not known to be supported." - fi - $ECHO "*** All declared inter-library dependencies are being dropped." 
- droppeddeps=yes - fi - ;; - esac - versuffix=$versuffix_save - major=$major_save - release=$release_save - libname=$libname_save - name=$name_save - - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) - # On Rhapsody replace the C library with the System framework - newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's/ -lc / System.ltframework /'` - ;; - esac - - if test "$droppeddeps" = yes; then - if test "$module" = yes; then - $ECHO - $ECHO "*** Warning: libtool could not satisfy all declared inter-library" - $ECHO "*** dependencies of module $libname. Therefore, libtool will create" - $ECHO "*** a static module, that should work as long as the dlopening" - $ECHO "*** application is linked with the -dlopen flag." - if test -z "$global_symbol_pipe"; then - $ECHO - $ECHO "*** However, this would only work if libtool was able to extract symbol" - $ECHO "*** lists from a program, using \`nm' or equivalent, but libtool could" - $ECHO "*** not find such a program. So, this module is probably useless." - $ECHO "*** \`nm' from GNU binutils and a full rebuild may help." - fi - if test "$build_old_libs" = no; then - oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - else - $ECHO "*** The inter-library dependencies that have been dropped here will be" - $ECHO "*** automatically added whenever a program is linked with this library" - $ECHO "*** or is declared to -dlopen it." - - if test "$allow_undefined" = no; then - $ECHO - $ECHO "*** Since this library must not contain undefined symbols," - $ECHO "*** because either the platform does not support them or" - $ECHO "*** it was explicitly requested with -no-undefined," - $ECHO "*** libtool will only create a static version of it." - if test "$build_old_libs" = no; then - oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - fi - fi - fi - # Done checking deplibs! - deplibs=$newdeplibs - fi - # Time to change all our "foo.ltframework" stuff back to "-framework foo" - case $host in - *-*-darwin*) - newdeplibs=`$ECHO "X $newdeplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - new_inherited_linker_flags=`$ECHO "X $new_inherited_linker_flags" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - deplibs=`$ECHO "X $deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - ;; - esac - - # move library search paths that coincide with paths to not yet - # installed libraries to the beginning of the library search list - new_libs= - for path in $notinst_path; do - case " $new_libs " in - *" -L$path/$objdir "*) ;; - *) - case " $deplibs " in - *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; - esac - ;; - esac - done - for deplib in $deplibs; do - case $deplib in - -L*) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; - esac - ;; - *) new_libs="$new_libs $deplib" ;; - esac - done - deplibs="$new_libs" - - # All the library-specific variables (install_libdir is set above). 
- library_names= - old_library= - dlname= - - # Test again, we may have decided not to build it any more - if test "$build_libtool_libs" = yes; then - if test "$hardcode_into_libs" = yes; then - # Hardcode the library paths - hardcode_libdirs= - dep_rpath= - rpath="$finalize_rpath" - test "$mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - dep_rpath="$dep_rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; - esac - fi - done - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - if test -n "$hardcode_libdir_flag_spec_ld"; then - eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" - else - eval dep_rpath=\"$hardcode_libdir_flag_spec\" - fi - fi - if test -n "$runpath_var" && test -n "$perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $perm_rpath; do - rpath="$rpath$dir:" - done - eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" - fi - test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" - fi - - shlibpath="$finalize_shlibpath" - test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" - if test -n "$shlibpath"; then - eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" - fi - - # Get the real and link names of the library. - eval shared_ext=\"$shrext_cmds\" - eval library_names=\"$library_names_spec\" - set dummy $library_names - shift - realname="$1" - shift - - if test -n "$soname_spec"; then - eval soname=\"$soname_spec\" - else - soname="$realname" - fi - if test -z "$dlname"; then - dlname=$soname - fi - - lib="$output_objdir/$realname" - linknames= - for link - do - linknames="$linknames $link" - done - - # Use standard objects if they are pic - test -z "$pic_flag" && libobjs=`$ECHO "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - test "X$libobjs" = "X " && libobjs= - - delfiles= - if test -n "$export_symbols" && test -n "$include_expsyms"; then - $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" - export_symbols="$output_objdir/$libname.uexp" - delfiles="$delfiles $export_symbols" - fi - - orig_export_symbols= - case $host_os in - cygwin* | mingw* | cegcc*) - if test -n "$export_symbols" && test -z "$export_symbols_regex"; then - # exporting using user supplied symfile - if test "x`$SED 1q $export_symbols`" != xEXPORTS; then - # and it's NOT already a .def file. Must figure out - # which of the given symbols are data symbols and tag - # them as such. So, trigger use of export_symbols_cmds. - # export_symbols gets reassigned inside the "prepare - # the list of exported symbols" if statement, so the - # include_expsyms logic still works. 
- orig_export_symbols="$export_symbols" - export_symbols= - always_export_symbols=yes - fi - fi - ;; - esac - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then - if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then - func_verbose "generating symbol list for \`$libname.la'" - export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - cmds=$export_symbols_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - func_len " $cmd" - len=$func_len_result - if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then - func_show_eval "$cmd" 'exit $?' - skipped_export=false - else - # The command line is too long to execute in one step. - func_verbose "using reloadable object file for export list..." - skipped_export=: - # Break out early, otherwise skipped_export may be - # set to false by a later but shorter cmd. - break - fi - done - IFS="$save_ifs" - if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' - func_show_eval '$MV "${export_symbols}T" "$export_symbols"' - fi - fi - fi - - if test -n "$export_symbols" && test -n "$include_expsyms"; then - tmp_export_symbols="$export_symbols" - test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"' - fi - - if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. - func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of - # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. - $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter - delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi - - tmp_deplibs= - for test_deplib in $deplibs; do - case " $convenience " in - *" $test_deplib "*) ;; - *) - tmp_deplibs="$tmp_deplibs $test_deplib" - ;; - esac - done - deplibs="$tmp_deplibs" - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec" && - test "$compiler_needs_object" = yes && - test -z "$libobjs"; then - # extract the archives, so we have objects to list. - # TODO: could optimize this to just extract one archive. 
- whole_archive_flag_spec= - fi - if test -n "$whole_archive_flag_spec"; then - save_libobjs=$libobjs - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - else - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $convenience - libobjs="$libobjs $func_extract_archives_result" - test "X$libobjs" = "X " && libobjs= - fi - fi - - if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then - eval flag=\"$thread_safe_flag_spec\" - linker_flags="$linker_flags $flag" - fi - - # Make a backup of the uninstalled library when relinking - if test "$mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? - fi - - # Do each of the archive commands. - if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - eval test_cmds=\"$module_expsym_cmds\" - cmds=$module_expsym_cmds - else - eval test_cmds=\"$module_cmds\" - cmds=$module_cmds - fi - else - if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then - eval test_cmds=\"$archive_expsym_cmds\" - cmds=$archive_expsym_cmds - else - eval test_cmds=\"$archive_cmds\" - cmds=$archive_cmds - fi - fi - - if test "X$skipped_export" != "X:" && - func_len " $test_cmds" && - len=$func_len_result && - test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then - : - else - # The command line is too long to link in one step, link piecewise - # or, if using GNU ld and skipped_export is not :, use a linker - # script. - - # Save the value of $output and $libobjs because we want to - # use them later. If we have whole_archive_flag_spec, we - # want to use save_libobjs as it was before - # whole_archive_flag_spec was expanded, because we can't - # assume the linker understands whole_archive_flag_spec. - # This may have to be revisited, in case too many - # convenience libraries get linked in and end up exceeding - # the spec. - if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then - save_libobjs=$libobjs - fi - save_output=$output - output_la=`$ECHO "X$output" | $Xsed -e "$basename"` - - # Clear the reloadable object creation command queue and - # initialize k to one. - test_cmds= - concat_cmds= - objlist= - last_robj= - k=1 - - if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then - output=${output_objdir}/${output_la}.lnkscript - func_verbose "creating GNU ld script: $output" - $ECHO 'INPUT (' > $output - for obj in $save_libobjs - do - $ECHO "$obj" >> $output - done - $ECHO ')' >> $output - delfiles="$delfiles $output" - elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then - output=${output_objdir}/${output_la}.lnk - func_verbose "creating linker input file list: $output" - : > $output - set x $save_libobjs - shift - firstobj= - if test "$compiler_needs_object" = yes; then - firstobj="$1 " - shift - fi - for obj - do - $ECHO "$obj" >> $output - done - delfiles="$delfiles $output" - output=$firstobj\"$file_list_spec$output\" - else - if test -n "$save_libobjs"; then - func_verbose "creating reloadable object files..." - output=$output_objdir/$output_la-${k}.$objext - eval test_cmds=\"$reload_cmds\" - func_len " $test_cmds" - len0=$func_len_result - len=$len0 - - # Loop over the list of objects to be linked. 
- for obj in $save_libobjs - do - func_len " $obj" - func_arith $len + $func_len_result - len=$func_arith_result - if test "X$objlist" = X || - test "$len" -lt "$max_cmd_len"; then - func_append objlist " $obj" - else - # The command $test_cmds is almost too long, add a - # command to the queue. - if test "$k" -eq 1 ; then - # The first file doesn't have a previous command to add. - eval concat_cmds=\"$reload_cmds $objlist $last_robj\" - else - # All subsequent reloadable object files will link in - # the last one created. - eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj~\$RM $last_robj\" - fi - last_robj=$output_objdir/$output_la-${k}.$objext - func_arith $k + 1 - k=$func_arith_result - output=$output_objdir/$output_la-${k}.$objext - objlist=$obj - func_len " $last_robj" - func_arith $len0 + $func_len_result - len=$func_arith_result - fi - done - # Handle the remaining objects by creating one last - # reloadable object file. All subsequent reloadable object - # files will link in the last one created. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\" - if test -n "$last_robj"; then - eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" - fi - delfiles="$delfiles $output" - - else - output= - fi - - if ${skipped_export-false}; then - func_verbose "generating symbol list for \`$libname.la'" - export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - libobjs=$output - # Append the command to create the export file. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" - if test -n "$last_robj"; then - eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" - fi - fi - - test -n "$save_libobjs" && - func_verbose "creating a temporary reloadable object file: $output" - - # Loop through the commands generated above and execute them. - save_ifs="$IFS"; IFS='~' - for cmd in $concat_cmds; do - IFS="$save_ifs" - $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } - $opt_dry_run || eval "$cmd" || { - lt_exit=$? - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) - fi - - exit $lt_exit - } - done - IFS="$save_ifs" - - if test -n "$export_symbols_regex" && ${skipped_export-false}; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' - func_show_eval '$MV "${export_symbols}T" "$export_symbols"' - fi - fi - - if ${skipped_export-false}; then - if test -n "$export_symbols" && test -n "$include_expsyms"; then - tmp_export_symbols="$export_symbols" - test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "X$include_expsyms" | $Xsed | $SP2NL >> "$tmp_export_symbols"' - fi - - if test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. - func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of - # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. 
- $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter - delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi - fi - - libobjs=$output - # Restore the value of output. - output=$save_output - - if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - fi - # Expand the library linking commands again to reset the - # value of $libobjs for piecewise linking. - - # Do each of the archive commands. - if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - cmds=$module_expsym_cmds - else - cmds=$module_cmds - fi - else - if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then - cmds=$archive_expsym_cmds - else - cmds=$archive_cmds - fi - fi - fi - - if test -n "$delfiles"; then - # Append the command to remove temporary files to $cmds. - eval cmds=\"\$cmds~\$RM $delfiles\" - fi - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $dlprefiles - libobjs="$libobjs $func_extract_archives_result" - test "X$libobjs" = "X " && libobjs= - fi - - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } - $opt_dry_run || eval "$cmd" || { - lt_exit=$? - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) - fi - - exit $lt_exit - } - done - IFS="$save_ifs" - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? - - if test -n "$convenience"; then - if test -z "$whole_archive_flag_spec"; then - func_show_eval '${RM}r "$gentop"' - fi - fi - - exit $EXIT_SUCCESS - fi - - # Create links to the real library. - for linkname in $linknames; do - if test "$realname" != "$linkname"; then - func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?' - fi - done - - # If -module or -export-dynamic was specified, set the dlname. - if test "$module" = yes || test "$export_dynamic" = yes; then - # On all known operating systems, these are identical. 
- dlname="$soname" - fi - fi - ;; - - obj) - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - func_warning "\`-dlopen' is ignored for objects" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) - func_warning "\`-l' and \`-L' are ignored for objects" ;; - esac - - test -n "$rpath" && \ - func_warning "\`-rpath' is ignored for objects" - - test -n "$xrpath" && \ - func_warning "\`-R' is ignored for objects" - - test -n "$vinfo" && \ - func_warning "\`-version-info' is ignored for objects" - - test -n "$release" && \ - func_warning "\`-release' is ignored for objects" - - case $output in - *.lo) - test -n "$objs$old_deplibs" && \ - func_fatal_error "cannot build library object \`$output' from non-libtool objects" - - libobj=$output - func_lo2o "$libobj" - obj=$func_lo2o_result - ;; - *) - libobj= - obj="$output" - ;; - esac - - # Delete the old objects. - $opt_dry_run || $RM $obj $libobj - - # Objects from convenience libraries. This assumes - # single-version convenience libraries. Whenever we create - # different ones for PIC/non-PIC, this we'll have to duplicate - # the extraction. - reload_conv_objs= - gentop= - # reload_cmds runs $LD directly, so let us get rid of - # -Wl from whole_archive_flag_spec and hope we can get by with - # turning comma into space.. - wl= - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then - eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" - reload_conv_objs=$reload_objs\ `$ECHO "X$tmp_whole_archive_flags" | $Xsed -e 's|,| |g'` - else - gentop="$output_objdir/${obj}x" - generated="$generated $gentop" - - func_extract_archives $gentop $convenience - reload_conv_objs="$reload_objs $func_extract_archives_result" - fi - fi - - # Create the old-style object. - reload_objs="$objs$old_deplibs "`$ECHO "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test - - output="$obj" - func_execute_cmds "$reload_cmds" 'exit $?' - - # Exit if we aren't doing a library object file. - if test -z "$libobj"; then - if test -n "$gentop"; then - func_show_eval '${RM}r "$gentop"' - fi - - exit $EXIT_SUCCESS - fi - - if test "$build_libtool_libs" != yes; then - if test -n "$gentop"; then - func_show_eval '${RM}r "$gentop"' - fi - - # Create an invalid libtool object if no PIC, so that we don't - # accidentally link it into a program. - # $show "echo timestamp > $libobj" - # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? - exit $EXIT_SUCCESS - fi - - if test -n "$pic_flag" || test "$pic_mode" != default; then - # Only do commands if we really have different PIC objects. - reload_objs="$libobjs $reload_conv_objs" - output="$libobj" - func_execute_cmds "$reload_cmds" 'exit $?' - fi - - if test -n "$gentop"; then - func_show_eval '${RM}r "$gentop"' - fi - - exit $EXIT_SUCCESS - ;; - - prog) - case $host in - *cygwin*) func_stripname '' '.exe' "$output" - output=$func_stripname_result.exe;; - esac - test -n "$vinfo" && \ - func_warning "\`-version-info' is ignored for programs" - - test -n "$release" && \ - func_warning "\`-release' is ignored for programs" - - test "$preload" = yes \ - && test "$dlopen_support" = unknown \ - && test "$dlopen_self" = unknown \ - && test "$dlopen_self_static" = unknown && \ - func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." 
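For context on the warnings deleted just above: libtool's link mode emits them when library-only flags are supplied while building a program. A minimal sketch of an invocation that would trigger them (the gcc/hello names are hypothetical; --mode=link, -version-info and -release are standard libtool options):

    # Hypothetical program link: -version-info and -release only apply to
    # libtool libraries (*.la), so link mode warns that they are ignored here.
    libtool --mode=link gcc -version-info 2:1:0 -release 1.0 -o hello hello.o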
- - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) - # On Rhapsody replace the C library is the System framework - compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'` - finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's/ -lc / System.ltframework /'` - ;; - esac - - case $host in - *-*-darwin*) - # Don't allow lazy linking, it breaks C++ global constructors - # But is supposedly fixed on 10.4 or later (yay!). - if test "$tagname" = CXX ; then - case ${MACOSX_DEPLOYMENT_TARGET-10.0} in - 10.[0123]) - compile_command="$compile_command ${wl}-bind_at_load" - finalize_command="$finalize_command ${wl}-bind_at_load" - ;; - esac - fi - # Time to change all our "foo.ltframework" stuff back to "-framework foo" - compile_deplibs=`$ECHO "X $compile_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - finalize_deplibs=`$ECHO "X $finalize_deplibs" | $Xsed -e 's% \([^ $]*\).ltframework% -framework \1%g'` - ;; - esac - - - # move library search paths that coincide with paths to not yet - # installed libraries to the beginning of the library search list - new_libs= - for path in $notinst_path; do - case " $new_libs " in - *" -L$path/$objdir "*) ;; - *) - case " $compile_deplibs " in - *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; - esac - ;; - esac - done - for deplib in $compile_deplibs; do - case $deplib in - -L*) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; - esac - ;; - *) new_libs="$new_libs $deplib" ;; - esac - done - compile_deplibs="$new_libs" - - - compile_command="$compile_command $compile_deplibs" - finalize_command="$finalize_command $finalize_deplibs" - - if test -n "$rpath$xrpath"; then - # If the user specified any rpath flags, then add them. - for libdir in $rpath $xrpath; do - # This is the magic to use -rpath. - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" ;; - esac - done - fi - - # Now hardcode the library paths - rpath= - hardcode_libdirs= - for libdir in $compile_rpath $finalize_rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; - esac - fi - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) - testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$libdir:"*) ;; - ::) dllsearchpath=$libdir;; - *) dllsearchpath="$dllsearchpath:$libdir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - ::) dllsearchpath=$testbindir;; - *) dllsearchpath="$dllsearchpath:$testbindir";; - esac - ;; - esac - done - # Substitute the hardcoded libdirs into the rpath. 
- if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi - compile_rpath="$rpath" - - rpath= - hardcode_libdirs= - for libdir in $finalize_rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$finalize_perm_rpath " in - *" $libdir "*) ;; - *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; - esac - fi - done - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi - finalize_rpath="$rpath" - - if test -n "$libobjs" && test "$build_old_libs" = yes; then - # Transform all the library objects into standard objects. - compile_command=`$ECHO "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - finalize_command=`$ECHO "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - fi - - func_generate_dlsyms "$outputname" "@PROGRAM@" "no" - - # template prelinking step - if test -n "$prelink_cmds"; then - func_execute_cmds "$prelink_cmds" 'exit $?' - fi - - wrappers_required=yes - case $host in - *cygwin* | *mingw* ) - if test "$build_libtool_libs" != yes; then - wrappers_required=no - fi - ;; - *cegcc) - # Disable wrappers for cegcc, we are cross compiling anyway. - wrappers_required=no - ;; - *) - if test "$need_relink" = no || test "$build_libtool_libs" != yes; then - wrappers_required=no - fi - ;; - esac - if test "$wrappers_required" = no; then - # Replace the output file specification. - compile_command=`$ECHO "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` - link_command="$compile_command$compile_rpath" - - # We have no uninstalled library dependencies, so finalize right now. - exit_status=0 - func_show_eval "$link_command" 'exit_status=$?' - - # Delete the generated files. - if test -f "$output_objdir/${outputname}S.${objext}"; then - func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' - fi - - exit $exit_status - fi - - if test -n "$compile_shlibpath$finalize_shlibpath"; then - compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" - fi - if test -n "$finalize_shlibpath"; then - finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" - fi - - compile_var= - finalize_var= - if test -n "$runpath_var"; then - if test -n "$perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $perm_rpath; do - rpath="$rpath$dir:" - done - compile_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi - if test -n "$finalize_perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $finalize_perm_rpath; do - rpath="$rpath$dir:" - done - finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi - fi - - if test "$no_install" = yes; then - # We don't need to create a wrapper script. 
- link_command="$compile_var$compile_command$compile_rpath" - # Replace the output file specification. - link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` - # Delete the old output file. - $opt_dry_run || $RM $output - # Link the executable and exit - func_show_eval "$link_command" 'exit $?' - exit $EXIT_SUCCESS - fi - - if test "$hardcode_action" = relink; then - # Fast installation is not supported - link_command="$compile_var$compile_command$compile_rpath" - relink_command="$finalize_var$finalize_command$finalize_rpath" - - func_warning "this platform does not like uninstalled shared libraries" - func_warning "\`$output' will be relinked during installation" - else - if test "$fast_install" != no; then - link_command="$finalize_var$compile_command$finalize_rpath" - if test "$fast_install" = yes; then - relink_command=`$ECHO "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'` - else - # fast_install is set to needless - relink_command= - fi - else - link_command="$compile_var$compile_command$compile_rpath" - relink_command="$finalize_var$finalize_command$finalize_rpath" - fi - fi - - # Replace the output file specification. - link_command=`$ECHO "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` - - # Delete the old output files. - $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname - - func_show_eval "$link_command" 'exit $?' - - # Now create the wrapper script. - func_verbose "creating $output" - - # Quote the relink command for shipping. - if test -n "$relink_command"; then - # Preserve any variables that may affect compiler behavior - for var in $variables_saved_for_relink; do - if eval test -z \"\${$var+set}\"; then - relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" - elif eval var_value=\$$var; test -z "$var_value"; then - relink_command="$var=; export $var; $relink_command" - else - func_quote_for_eval "$var_value" - relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" - fi - done - relink_command="(cd `pwd`; $relink_command)" - relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"` - fi - - # Quote $ECHO for shipping. - if test "X$ECHO" = "X$SHELL $progpath --fallback-echo"; then - case $progpath in - [\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";; - *) qecho="$SHELL `pwd`/$progpath --fallback-echo";; - esac - qecho=`$ECHO "X$qecho" | $Xsed -e "$sed_quote_subst"` - else - qecho=`$ECHO "X$ECHO" | $Xsed -e "$sed_quote_subst"` - fi - - # Only actually do things if not in dry run mode. - $opt_dry_run || { - # win32 will think the script is a binary if it has - # a .exe suffix, so we strip it off here. - case $output in - *.exe) func_stripname '' '.exe' "$output" - output=$func_stripname_result ;; - esac - # test for cygwin because mv fails w/o .exe extensions - case $host in - *cygwin*) - exeext=.exe - func_stripname '' '.exe' "$outputname" - outputname=$func_stripname_result ;; - *) exeext= ;; - esac - case $host in - *cygwin* | *mingw* ) - func_dirname_and_basename "$output" "" "." 
- output_name=$func_basename_result - output_path=$func_dirname_result - cwrappersource="$output_path/$objdir/lt-$output_name.c" - cwrapper="$output_path/$output_name.exe" - $RM $cwrappersource $cwrapper - trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 - - func_emit_cwrapperexe_src > $cwrappersource - - # The wrapper executable is built using the $host compiler, - # because it contains $host paths and files. If cross- - # compiling, it, like the target executable, must be - # executed on the $host or under an emulation environment. - $opt_dry_run || { - $LTCC $LTCFLAGS -o $cwrapper $cwrappersource - $STRIP $cwrapper - } - - # Now, create the wrapper script for func_source use: - func_ltwrapper_scriptname $cwrapper - $RM $func_ltwrapper_scriptname_result - trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 - $opt_dry_run || { - # note: this script will not be executed, so do not chmod. - if test "x$build" = "x$host" ; then - $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result - else - func_emit_wrapper no > $func_ltwrapper_scriptname_result - fi - } - ;; - * ) - $RM $output - trap "$RM $output; exit $EXIT_FAILURE" 1 2 15 - - func_emit_wrapper no > $output - chmod +x $output - ;; - esac - } - exit $EXIT_SUCCESS - ;; - esac - - # See if we need to build an old-fashioned archive. - for oldlib in $oldlibs; do - - if test "$build_libtool_libs" = convenience; then - oldobjs="$libobjs_save $symfileobj" - addlibs="$convenience" - build_libtool_libs=no - else - if test "$build_libtool_libs" = module; then - oldobjs="$libobjs_save" - build_libtool_libs=no - else - oldobjs="$old_deplibs $non_pic_objects" - if test "$preload" = yes && test -f "$symfileobj"; then - oldobjs="$oldobjs $symfileobj" - fi - fi - addlibs="$old_convenience" - fi - - if test -n "$addlibs"; then - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $addlibs - oldobjs="$oldobjs $func_extract_archives_result" - fi - - # Do each command in the archive commands. - if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then - cmds=$old_archive_from_new_cmds - else - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $dlprefiles - oldobjs="$oldobjs $func_extract_archives_result" - fi - - # POSIX demands no paths to be encoded in archives. We have - # to avoid creating archives with duplicate basenames if we - # might have to extract them afterwards, e.g., when creating a - # static archive out of a convenience library, or when linking - # the entirety of a libtool archive into another (currently - # not supported by libtool). - if (for obj in $oldobjs - do - func_basename "$obj" - $ECHO "$func_basename_result" - done | sort | sort -uc >/dev/null 2>&1); then - : - else - $ECHO "copying selected object files to avoid basename conflicts..." - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - func_mkdir_p "$gentop" - save_oldobjs=$oldobjs - oldobjs= - counter=1 - for obj in $save_oldobjs - do - func_basename "$obj" - objbase="$func_basename_result" - case " $oldobjs " in - " ") oldobjs=$obj ;; - *[\ /]"$objbase "*) - while :; do - # Make sure we don't pick an alternate name that also - # overlaps. - newobj=lt$counter-$objbase - func_arith $counter + 1 - counter=$func_arith_result - case " $oldobjs " in - *[\ /]"$newobj "*) ;; - *) if test ! 
-f "$gentop/$newobj"; then break; fi ;; - esac - done - func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" - oldobjs="$oldobjs $gentop/$newobj" - ;; - *) oldobjs="$oldobjs $obj" ;; - esac - done - fi - eval cmds=\"$old_archive_cmds\" - - func_len " $cmds" - len=$func_len_result - if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then - cmds=$old_archive_cmds - else - # the command line is too long to link in one step, link in parts - func_verbose "using piecewise archive linking..." - save_RANLIB=$RANLIB - RANLIB=: - objlist= - concat_cmds= - save_oldobjs=$oldobjs - oldobjs= - # Is there a better way of finding the last object in the list? - for obj in $save_oldobjs - do - last_oldobj=$obj - done - eval test_cmds=\"$old_archive_cmds\" - func_len " $test_cmds" - len0=$func_len_result - len=$len0 - for obj in $save_oldobjs - do - func_len " $obj" - func_arith $len + $func_len_result - len=$func_arith_result - func_append objlist " $obj" - if test "$len" -lt "$max_cmd_len"; then - : - else - # the above command should be used before it gets too long - oldobjs=$objlist - if test "$obj" = "$last_oldobj" ; then - RANLIB=$save_RANLIB - fi - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" - objlist= - len=$len0 - fi - done - RANLIB=$save_RANLIB - oldobjs=$objlist - if test "X$oldobjs" = "X" ; then - eval cmds=\"\$concat_cmds\" - else - eval cmds=\"\$concat_cmds~\$old_archive_cmds\" - fi - fi - fi - func_execute_cmds "$cmds" 'exit $?' - done - - test -n "$generated" && \ - func_show_eval "${RM}r$generated" - - # Now create the libtool archive. - case $output in - *.la) - old_library= - test "$build_old_libs" = yes && old_library="$libname.$libext" - func_verbose "creating $output" - - # Preserve any variables that may affect compiler behavior - for var in $variables_saved_for_relink; do - if eval test -z \"\${$var+set}\"; then - relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" - elif eval var_value=\$$var; test -z "$var_value"; then - relink_command="$var=; export $var; $relink_command" - else - func_quote_for_eval "$var_value" - relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" - fi - done - # Quote the link command for shipping. - relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" - relink_command=`$ECHO "X$relink_command" | $Xsed -e "$sed_quote_subst"` - if test "$hardcode_automatic" = yes ; then - relink_command= - fi - - # Only create the output if not a dry run. 
- $opt_dry_run || { - for installed in no yes; do - if test "$installed" = yes; then - if test -z "$install_libdir"; then - break - fi - output="$output_objdir/$outputname"i - # Replace all uninstalled libtool libraries with the installed ones - newdependency_libs= - for deplib in $dependency_libs; do - case $deplib in - *.la) - func_basename "$deplib" - name="$func_basename_result" - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - test -z "$libdir" && \ - func_fatal_error "\`$deplib' is not a valid libtool archive" - newdependency_libs="$newdependency_libs $libdir/$name" - ;; - *) newdependency_libs="$newdependency_libs $deplib" ;; - esac - done - dependency_libs="$newdependency_libs" - newdlfiles= - - for lib in $dlfiles; do - case $lib in - *.la) - func_basename "$lib" - name="$func_basename_result" - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ - func_fatal_error "\`$lib' is not a valid libtool archive" - newdlfiles="$newdlfiles $libdir/$name" - ;; - *) newdlfiles="$newdlfiles $lib" ;; - esac - done - dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in - *.la) - # Only pass preopened files to the pseudo-archive (for - # eventual linking with the app. that links it) if we - # didn't already link the preopened objects directly into - # the library: - func_basename "$lib" - name="$func_basename_result" - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ - func_fatal_error "\`$lib' is not a valid libtool archive" - newdlprefiles="$newdlprefiles $libdir/$name" - ;; - esac - done - dlprefiles="$newdlprefiles" - else - newdlfiles= - for lib in $dlfiles; do - case $lib in - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - newdlfiles="$newdlfiles $abs" - done - dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - newdlprefiles="$newdlprefiles $abs" - done - dlprefiles="$newdlprefiles" - fi - $RM $output - # place dlname in correct position for cygwin - tdlname=$dlname - case $host,$output,$installed,$module,$dlname in - *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;; - esac - $ECHO > $output "\ -# $outputname - a libtool library file -# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION -# -# Please DO NOT delete this file! -# It is necessary for linking the library. - -# The name that we can dlopen(3). -dlname='$tdlname' - -# Names of this library. -library_names='$library_names' - -# The name of the static archive. -old_library='$old_library' - -# Linker flags that can not go in dependency_libs. -inherited_linker_flags='$new_inherited_linker_flags' - -# Libraries that this one depends upon. -dependency_libs='$dependency_libs' - -# Names of additional weak libraries provided by this library -weak_library_names='$weak_libs' - -# Version information for $libname. -current=$current -age=$age -revision=$revision - -# Is this an already installed library? -installed=$installed - -# Should we warn about portability when linking against -modules? 
-shouldnotlink=$module - -# Files to dlopen/dlpreopen -dlopen='$dlfiles' -dlpreopen='$dlprefiles' - -# Directory that this library needs to be installed in: -libdir='$install_libdir'" - if test "$installed" = no && test "$need_relink" = yes; then - $ECHO >> $output "\ -relink_command=\"$relink_command\"" - fi - done - } - - # Do a symbolic link so that the libtool archive can be found in - # LD_LIBRARY_PATH before the program is installed. - func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?' - ;; - esac - exit $EXIT_SUCCESS -} - -{ test "$mode" = link || test "$mode" = relink; } && - func_mode_link ${1+"$@"} - - -# func_mode_uninstall arg... -func_mode_uninstall () -{ - $opt_debug - RM="$nonopt" - files= - rmforce= - exit_status=0 - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. - libtool_install_magic="$magic" - - for arg - do - case $arg in - -f) RM="$RM $arg"; rmforce=yes ;; - -*) RM="$RM $arg" ;; - *) files="$files $arg" ;; - esac - done - - test -z "$RM" && \ - func_fatal_help "you must specify an RM program" - - rmdirs= - - origobjdir="$objdir" - for file in $files; do - func_dirname "$file" "" "." - dir="$func_dirname_result" - if test "X$dir" = X.; then - objdir="$origobjdir" - else - objdir="$dir/$origobjdir" - fi - func_basename "$file" - name="$func_basename_result" - test "$mode" = uninstall && objdir="$dir" - - # Remember objdir for removal later, being careful to avoid duplicates - if test "$mode" = clean; then - case " $rmdirs " in - *" $objdir "*) ;; - *) rmdirs="$rmdirs $objdir" ;; - esac - fi - - # Don't error if the file doesn't exist and rm -f was used. - if { test -L "$file"; } >/dev/null 2>&1 || - { test -h "$file"; } >/dev/null 2>&1 || - test -f "$file"; then - : - elif test -d "$file"; then - exit_status=1 - continue - elif test "$rmforce" = yes; then - continue - fi - - rmfiles="$file" - - case $name in - *.la) - # Possibly a libtool archive, so verify it. - if func_lalib_p "$file"; then - func_source $dir/$name - - # Delete the libtool libraries and symlinks. - for n in $library_names; do - rmfiles="$rmfiles $objdir/$n" - done - test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" - - case "$mode" in - clean) - case " $library_names " in - # " " in the beginning catches empty $dlname - *" $dlname "*) ;; - *) rmfiles="$rmfiles $objdir/$dlname" ;; - esac - test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" - ;; - uninstall) - if test -n "$library_names"; then - # Do each command in the postuninstall commands. - func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - - if test -n "$old_library"; then - # Do each command in the old_postuninstall commands. - func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - # FIXME: should reinstall the best remaining shared library. - ;; - esac - fi - ;; - - *.lo) - # Possibly a libtool object, so verify it. - if func_lalib_p "$file"; then - - # Read the .lo file - func_source $dir/$name - - # Add PIC object to the list of files to remove. - if test -n "$pic_object" && - test "$pic_object" != none; then - rmfiles="$rmfiles $dir/$pic_object" - fi - - # Add non-PIC object to the list of files to remove. 
- if test -n "$non_pic_object" && - test "$non_pic_object" != none; then - rmfiles="$rmfiles $dir/$non_pic_object" - fi - fi - ;; - - *) - if test "$mode" = clean ; then - noexename=$name - case $file in - *.exe) - func_stripname '' '.exe' "$file" - file=$func_stripname_result - func_stripname '' '.exe' "$name" - noexename=$func_stripname_result - # $file with .exe has already been added to rmfiles, - # add $file without .exe - rmfiles="$rmfiles $file" - ;; - esac - # Do a test to see if this is a libtool program. - if func_ltwrapper_p "$file"; then - if func_ltwrapper_executable_p "$file"; then - func_ltwrapper_scriptname "$file" - relink_command= - func_source $func_ltwrapper_scriptname_result - rmfiles="$rmfiles $func_ltwrapper_scriptname_result" - else - relink_command= - func_source $dir/$noexename - fi - - # note $name still contains .exe if it was in $file originally - # as does the version of $file that was added into $rmfiles - rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" - if test "$fast_install" = yes && test -n "$relink_command"; then - rmfiles="$rmfiles $objdir/lt-$name" - fi - if test "X$noexename" != "X$name" ; then - rmfiles="$rmfiles $objdir/lt-${noexename}.c" - fi - fi - fi - ;; - esac - func_show_eval "$RM $rmfiles" 'exit_status=1' - done - objdir="$origobjdir" - - # Try to remove the ${objdir}s in the directories where we deleted files - for dir in $rmdirs; do - if test -d "$dir"; then - func_show_eval "rmdir $dir >/dev/null 2>&1" - fi - done - - exit $exit_status -} - -{ test "$mode" = uninstall || test "$mode" = clean; } && - func_mode_uninstall ${1+"$@"} - -test -z "$mode" && { - help="$generic_help" - func_fatal_help "you must specify a MODE" -} - -test -z "$exec_cmd" && \ - func_fatal_help "invalid operation mode \`$mode'" - -if test -n "$exec_cmd"; then - eval exec "$exec_cmd" - exit $EXIT_FAILURE -fi - -exit $exit_status - - -# The TAGs below are defined such that we never get into a situation -# in which we disable both kinds of libraries. Given conflicting -# choices, we go for a static library, that is the most portable, -# since we can't tell whether shared libraries were disabled because -# the user asked for that or because the platform doesn't support -# them. This is particularly important on AIX, because we don't -# support having both static and shared libraries enabled at the same -# time on that platform, so we default to a shared-only configuration. -# If a disable-shared tag is given, we'll fallback to a static-only -# configuration. But we'll never go from static-only to shared-only. - -# ### BEGIN LIBTOOL TAG CONFIG: disable-shared -build_libtool_libs=no -build_old_libs=yes -# ### END LIBTOOL TAG CONFIG: disable-shared - -# ### BEGIN LIBTOOL TAG CONFIG: disable-static -build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` -# ### END LIBTOOL TAG CONFIG: disable-static - -# Local Variables: -# mode:shell-script -# sh-indentation:2 -# End: -# vi:sw=2 - diff --git a/xbmc/lib/cpluff-0.1.3/po/Makefile.in b/xbmc/lib/cpluff-0.1.3/po/Makefile.in deleted file mode 100644 index 2dc9e89552..0000000000 --- a/xbmc/lib/cpluff-0.1.3/po/Makefile.in +++ /dev/null @@ -1,403 +0,0 @@ -# Makefile for PO directory in any package using GNU gettext. -# Copyright (C) 1995-1997, 2000-2006 by Ulrich Drepper <drepper@gnu.ai.mit.edu> -# -# This file can be copied and used freely without restrictions. 
It can -# be used in projects which are not available under the GNU General Public -# License but which still want to provide support for the GNU gettext -# functionality. -# Please note that the actual code of GNU gettext is covered by the GNU -# General Public License and is *not* in the public domain. -# -# Origin: gettext-0.16 - -PACKAGE = cpluff -VERSION = 0.1.3 -PACKAGE_BUGREPORT = johannes.lehtinen@iki.fi - -SHELL = /bin/sh - - -srcdir = . -top_srcdir = .. - - -prefix = /usr/local -exec_prefix = ${prefix} -datarootdir = ${prefix}/share -datadir = ${datarootdir} -localedir = ${datarootdir}/locale -gettextsrcdir = $(datadir)/gettext/po - -INSTALL = /usr/bin/install -c -INSTALL_DATA = ${INSTALL} -m 644 - -# We use $(mkdir_p). -# In automake <= 1.9.x, $(mkdir_p) is defined either as "mkdir -p --" or as -# "$(mkinstalldirs)" or as "$(install_sh) -d". For these automake versions, -# ${SHELL} /home/alasdair/code/git-xbmc/xbmc/lib/cpluff-0.1.3/auxliary/install-sh does not start with $(SHELL), so we add it. -# In automake >= 1.10, /bin/mkdir -p is derived from ${MKDIR_P}, which is defined -# either as "/path/to/mkdir -p" or ".../install-sh -c -d". For these automake -# versions, $(mkinstalldirs) and $(install_sh) are unused. -mkinstalldirs = $(SHELL) ${SHELL} /home/alasdair/code/git-xbmc/xbmc/lib/cpluff-0.1.3/auxliary/install-sh -d -install_sh = $(SHELL) ${SHELL} /home/alasdair/code/git-xbmc/xbmc/lib/cpluff-0.1.3/auxliary/install-sh -MKDIR_P = /bin/mkdir -p -mkdir_p = /bin/mkdir -p - -GMSGFMT_ = /usr/bin/msgfmt -GMSGFMT_no = /usr/bin/msgfmt -GMSGFMT_yes = /usr/bin/msgfmt -GMSGFMT = $(GMSGFMT_$(USE_MSGCTXT)) -MSGFMT_ = /usr/bin/msgfmt -MSGFMT_no = /usr/bin/msgfmt -MSGFMT_yes = /usr/bin/msgfmt -MSGFMT = $(MSGFMT_$(USE_MSGCTXT)) -XGETTEXT_ = /usr/bin/xgettext -XGETTEXT_no = /usr/bin/xgettext -XGETTEXT_yes = /usr/bin/xgettext -XGETTEXT = $(XGETTEXT_$(USE_MSGCTXT)) -MSGMERGE = msgmerge -MSGMERGE_UPDATE = /usr/bin/msgmerge --update -MSGINIT = msginit -MSGCONV = msgconv -MSGFILTER = msgfilter - -POFILES = @POFILES@ -GMOFILES = @GMOFILES@ -UPDATEPOFILES = @UPDATEPOFILES@ -DUMMYPOFILES = @DUMMYPOFILES@ -DISTFILES.common = Makefile.in.in remove-potcdate.sin \ -$(DISTFILES.common.extra1) $(DISTFILES.common.extra2) $(DISTFILES.common.extra3) -DISTFILES = $(DISTFILES.common) Makevars POTFILES.in \ -$(POFILES) $(GMOFILES) \ -$(DISTFILES.extra1) $(DISTFILES.extra2) $(DISTFILES.extra3) - -POTFILES = \ - -CATALOGS = @CATALOGS@ - -# Makevars gets inserted here. (Don't remove this line!) - -.SUFFIXES: -.SUFFIXES: .po .gmo .mo .sed .sin .nop .po-create .po-update - -.po.mo: - @echo "$(MSGFMT) -c -o $@ $<"; \ - $(MSGFMT) -c -o t-$@ $< && mv t-$@ $@ - -.po.gmo: - @lang=`echo $* | sed -e 's,.*/,,'`; \ - test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \ - echo "$${cdcmd}rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics -o $${lang}.gmo $${lang}.po"; \ - cd $(srcdir) && rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics -o t-$${lang}.gmo $${lang}.po && mv t-$${lang}.gmo $${lang}.gmo - -.sin.sed: - sed -e '/^#/d' $< > t-$@ - mv t-$@ $@ - - -all: all-yes - -all-yes: stamp-po -all-no: - -# $(srcdir)/$(DOMAIN).pot is only created when needed. When xgettext finds no -# internationalized messages, no $(srcdir)/$(DOMAIN).pot is created (because -# we don't want to bother translators with empty POT files). We assume that -# LINGUAS is empty in this case, i.e. $(POFILES) and $(GMOFILES) are empty. -# In this case, stamp-po is a nop (i.e. a phony target). 
- -# stamp-po is a timestamp denoting the last time at which the CATALOGS have -# been loosely updated. Its purpose is that when a developer or translator -# checks out the package via CVS, and the $(DOMAIN).pot file is not in CVS, -# "make" will update the $(DOMAIN).pot and the $(CATALOGS), but subsequent -# invocations of "make" will do nothing. This timestamp would not be necessary -# if updating the $(CATALOGS) would always touch them; however, the rule for -# $(POFILES) has been designed to not touch files that don't need to be -# changed. -stamp-po: $(srcdir)/$(DOMAIN).pot - test ! -f $(srcdir)/$(DOMAIN).pot || \ - test -z "$(GMOFILES)" || $(MAKE) $(GMOFILES) - @test ! -f $(srcdir)/$(DOMAIN).pot || { \ - echo "touch stamp-po" && \ - echo timestamp > stamp-poT && \ - mv stamp-poT stamp-po; \ - } - -# Note: Target 'all' must not depend on target '$(DOMAIN).pot-update', -# otherwise packages like GCC can not be built if only parts of the source -# have been downloaded. - -# This target rebuilds $(DOMAIN).pot; it is an expensive operation. -# Note that $(DOMAIN).pot is not touched if it doesn't need to be changed. -$(DOMAIN).pot-update: $(POTFILES) $(srcdir)/POTFILES.in remove-potcdate.sed - if test -n '$(MSGID_BUGS_ADDRESS)' || test '$(PACKAGE_BUGREPORT)' = '@'PACKAGE_BUGREPORT'@'; then \ - msgid_bugs_address='$(MSGID_BUGS_ADDRESS)'; \ - else \ - msgid_bugs_address='$(PACKAGE_BUGREPORT)'; \ - fi; \ - $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \ - --add-comments=TRANSLATORS: $(XGETTEXT_OPTIONS) \ - --files-from=$(srcdir)/POTFILES.in \ - --copyright-holder='$(COPYRIGHT_HOLDER)' \ - --msgid-bugs-address="$$msgid_bugs_address" - test ! -f $(DOMAIN).po || { \ - if test -f $(srcdir)/$(DOMAIN).pot; then \ - sed -f remove-potcdate.sed < $(srcdir)/$(DOMAIN).pot > $(DOMAIN).1po && \ - sed -f remove-potcdate.sed < $(DOMAIN).po > $(DOMAIN).2po && \ - if cmp $(DOMAIN).1po $(DOMAIN).2po >/dev/null 2>&1; then \ - rm -f $(DOMAIN).1po $(DOMAIN).2po $(DOMAIN).po; \ - else \ - rm -f $(DOMAIN).1po $(DOMAIN).2po $(srcdir)/$(DOMAIN).pot && \ - mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ - fi; \ - else \ - mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ - fi; \ - } - -# This rule has no dependencies: we don't need to update $(DOMAIN).pot at -# every "make" invocation, only create it when it is missing. -# Only "make $(DOMAIN).pot-update" or "make dist" will force an update. -$(srcdir)/$(DOMAIN).pot: - $(MAKE) $(DOMAIN).pot-update - -# This target rebuilds a PO file if $(DOMAIN).pot has changed. -# Note that a PO file is not touched if it doesn't need to be changed. -$(POFILES): $(srcdir)/$(DOMAIN).pot - @lang=`echo $@ | sed -e 's,.*/,,' -e 's/\.po$$//'`; \ - if test -f "$(srcdir)/$${lang}.po"; then \ - test "$(srcdir)" = . 
&& cdcmd="" || cdcmd="cd $(srcdir) && "; \ - echo "$${cdcmd}$(MSGMERGE_UPDATE) $${lang}.po $(DOMAIN).pot"; \ - cd $(srcdir) && $(MSGMERGE_UPDATE) $${lang}.po $(DOMAIN).pot; \ - else \ - $(MAKE) $${lang}.po-create; \ - fi - - -install: install-exec install-data -install-exec: -install-data: install-data-yes - if test "$(PACKAGE)" = "gettext-tools"; then \ - $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ - for file in $(DISTFILES.common) Makevars.template; do \ - $(INSTALL_DATA) $(srcdir)/$$file \ - $(DESTDIR)$(gettextsrcdir)/$$file; \ - done; \ - for file in Makevars; do \ - rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ - done; \ - else \ - : ; \ - fi -install-data-no: all -install-data-yes: all - $(mkdir_p) $(DESTDIR)$(datadir) - @catalogs='$(CATALOGS)'; \ - for cat in $$catalogs; do \ - cat=`basename $$cat`; \ - lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ - dir=$(localedir)/$$lang/LC_MESSAGES; \ - $(mkdir_p) $(DESTDIR)$$dir; \ - if test -r $$cat; then realcat=$$cat; else realcat=$(srcdir)/$$cat; fi; \ - $(INSTALL_DATA) $$realcat $(DESTDIR)$$dir/$(DOMAIN).mo; \ - echo "installing $$realcat as $(DESTDIR)$$dir/$(DOMAIN).mo"; \ - for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ - if test -n "$$lc"; then \ - if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ - link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ - mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ - mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ - (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ - for file in *; do \ - if test -f $$file; then \ - ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ - fi; \ - done); \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ - else \ - if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \ - :; \ - else \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ - mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ - fi; \ - fi; \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ - ln -s ../LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ - ln $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ - cp -p $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ - echo "installing $$realcat link as $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo"; \ - fi; \ - done; \ - done - -install-strip: install - -installdirs: installdirs-exec installdirs-data -installdirs-exec: -installdirs-data: installdirs-data-yes - if test "$(PACKAGE)" = "gettext-tools"; then \ - $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ - else \ - : ; \ - fi -installdirs-data-no: -installdirs-data-yes: - $(mkdir_p) $(DESTDIR)$(datadir) - @catalogs='$(CATALOGS)'; \ - for cat in $$catalogs; do \ - cat=`basename $$cat`; \ - lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ - dir=$(localedir)/$$lang/LC_MESSAGES; \ - $(mkdir_p) $(DESTDIR)$$dir; \ - for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ - if test -n "$$lc"; then \ - if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ - link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ - mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ - mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ - (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ - for file in *; do \ - if test -f $$file; then \ - ln -s ../$$link/$$file 
$(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ - fi; \ - done); \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ - else \ - if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \ - :; \ - else \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ - mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ - fi; \ - fi; \ - fi; \ - done; \ - done - -# Define this as empty until I found a useful application. -installcheck: - -uninstall: uninstall-exec uninstall-data -uninstall-exec: -uninstall-data: uninstall-data-yes - if test "$(PACKAGE)" = "gettext-tools"; then \ - for file in $(DISTFILES.common) Makevars.template; do \ - rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ - done; \ - else \ - : ; \ - fi -uninstall-data-no: -uninstall-data-yes: - catalogs='$(CATALOGS)'; \ - for cat in $$catalogs; do \ - cat=`basename $$cat`; \ - lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ - for lc in LC_MESSAGES $(EXTRA_LOCALE_CATEGORIES); do \ - rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ - done; \ - done - -check: all - -info dvi ps pdf html tags TAGS ctags CTAGS ID: - -mostlyclean: - rm -f remove-potcdate.sed - rm -f stamp-poT - rm -f core core.* $(DOMAIN).po $(DOMAIN).1po $(DOMAIN).2po *.new.po - rm -fr *.o - -clean: mostlyclean - -distclean: clean - rm -f Makefile Makefile.in POTFILES *.mo - -maintainer-clean: distclean - @echo "This command is intended for maintainers to use;" - @echo "it deletes files that may require special tools to rebuild." - rm -f stamp-po $(GMOFILES) - -distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir) -dist distdir: - $(MAKE) update-po - @$(MAKE) dist2 -# This is a separate target because 'update-po' must be executed before. -dist2: stamp-po $(DISTFILES) - dists="$(DISTFILES)"; \ - if test "$(PACKAGE)" = "gettext-tools"; then \ - dists="$$dists Makevars.template"; \ - fi; \ - if test -f $(srcdir)/$(DOMAIN).pot; then \ - dists="$$dists $(DOMAIN).pot stamp-po"; \ - fi; \ - if test -f $(srcdir)/ChangeLog; then \ - dists="$$dists ChangeLog"; \ - fi; \ - for i in 0 1 2 3 4 5 6 7 8 9; do \ - if test -f $(srcdir)/ChangeLog.$$i; then \ - dists="$$dists ChangeLog.$$i"; \ - fi; \ - done; \ - if test -f $(srcdir)/LINGUAS; then dists="$$dists LINGUAS"; fi; \ - for file in $$dists; do \ - if test -f $$file; then \ - cp -p $$file $(distdir) || exit 1; \ - else \ - cp -p $(srcdir)/$$file $(distdir) || exit 1; \ - fi; \ - done - -update-po: Makefile - $(MAKE) $(DOMAIN).pot-update - test -z "$(UPDATEPOFILES)" || $(MAKE) $(UPDATEPOFILES) - $(MAKE) update-gmo - -# General rule for creating PO files. - -.nop.po-create: - @lang=`echo $@ | sed -e 's/\.po-create$$//'`; \ - echo "File $$lang.po does not exist. If you are a translator, you can create it through 'msginit'." 1>&2; \ - exit 1 - -# General rule for updating PO files. - -.nop.po-update: - @lang=`echo $@ | sed -e 's/\.po-update$$//'`; \ - if test "$(PACKAGE)" = "gettext-tools"; then PATH=`pwd`/../src:$$PATH; fi; \ - tmpdir=`pwd`; \ - echo "$$lang:"; \ - test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \ - echo "$${cdcmd}$(MSGMERGE) $$lang.po $(DOMAIN).pot -o $$lang.new.po"; \ - cd $(srcdir); \ - if $(MSGMERGE) $$lang.po $(DOMAIN).pot -o $$tmpdir/$$lang.new.po; then \ - if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \ - rm -f $$tmpdir/$$lang.new.po; \ - else \ - if mv -f $$tmpdir/$$lang.new.po $$lang.po; then \ - :; \ - else \ - echo "msgmerge for $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \ - exit 1; \ - fi; \ - fi; \ - else \ - echo "msgmerge for $$lang.po failed!" 
1>&2; \ - rm -f $$tmpdir/$$lang.new.po; \ - fi - -$(DUMMYPOFILES): - -update-gmo: Makefile $(GMOFILES) - @: - -Makefile: Makefile.in.in Makevars $(top_builddir)/config.status @POMAKEFILEDEPS@ - cd $(top_builddir) \ - && $(SHELL) ./config.status $(subdir)/$@.in po-directories - -force: - -# Tell versions [3.59,3.63) of GNU make not to export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT: diff --git a/xbmc/lib/cpluff-0.1.3/po/POTFILES b/xbmc/lib/cpluff-0.1.3/po/POTFILES deleted file mode 100644 index 706603894a..0000000000 --- a/xbmc/lib/cpluff-0.1.3/po/POTFILES +++ /dev/null @@ -1,16 +0,0 @@ - ../console/cmdinput_basic.c \ - ../console/cmdinput_readline.c \ - ../console/console.c \ - ../libcpluff/context.c \ - ../libcpluff/cpluff.c \ - ../libcpluff/logging.c \ - ../libcpluff/pcontrol.c \ - ../libcpluff/pinfo.c \ - ../libcpluff/ploader.c \ - ../libcpluff/pscan.c \ - ../libcpluff/psymbol.c \ - ../libcpluff/serial.c \ - ../libcpluff/thread_posix.c \ - ../libcpluff/thread_windows.c \ - ../libcpluff/util.c \ - ../loader/loader.c diff --git a/xbmc/lib/cpluff-0.1.3/stamp-h1 b/xbmc/lib/cpluff-0.1.3/stamp-h1 deleted file mode 100644 index 4547fe1b5e..0000000000 --- a/xbmc/lib/cpluff-0.1.3/stamp-h1 +++ /dev/null @@ -1 +0,0 @@ -timestamp for config.h diff --git a/xbmc/visualizations/Goom/net.sf.goom.vis/description.xml b/xbmc/visualizations/Goom/net.sf.goom.vis/description.xml new file mode 100644 index 0000000000..9c5137312c --- /dev/null +++ b/xbmc/visualizations/Goom/net.sf.goom.vis/description.xml @@ -0,0 +1,19 @@ +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<addoninfo> + <id>net.sf.goom.vis</id> + <type>visualization</type> + <title>Goom</title> + <library>Goom.vis</library> + <librarywin32>Goom_win32.vis</librarywin32> + <version>1.0.0</version> + <platforms> + <platform>windows</platform> + <platform>linux</platform> + </platforms> + <minversion> + <xbmc>28344</xbmc> + </minversion> + <summary>Visualization</summary> + <description>Goom visual effects generator</description> + <author>TEAMXBMC</author> +</addoninfo>