diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-27 16:51:28 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-27 16:51:28 +0000 |
commit | 940b4d1848e8c70ab7642901a68594e8016caffc (patch) | |
tree | eb72f344ee6c3d9b80a7ecc079ea79e9fba8676d /xmloff/qa/unit/tokenmap-test.cxx | |
parent | Initial commit. (diff) | |
download | libreoffice-1ad18e38974bb28c3d98d0be8f7d8c18fc56de29.tar.xz libreoffice-1ad18e38974bb28c3d98d0be8f7d8c18fc56de29.zip |
Adding upstream version 1:7.0.4. (tags: upstream/1%7.0.4, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'xmloff/qa/unit/tokenmap-test.cxx')
-rw-r--r-- | xmloff/qa/unit/tokenmap-test.cxx | 93 |
1 file changed, 93 insertions, 0 deletions
diff --git a/xmloff/qa/unit/tokenmap-test.cxx b/xmloff/qa/unit/tokenmap-test.cxx new file mode 100644 index 000000000..476416fc3 --- /dev/null +++ b/xmloff/qa/unit/tokenmap-test.cxx @@ -0,0 +1,93 @@ +/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ +/* + * This file is part of the LibreOffice project. + * + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. + */ + +#include <memory> +#include <cppunit/TestAssert.h> +#include <cppunit/TestFixture.h> +#include <cppunit/extensions/HelperMacros.h> + +#include <fasttokenhandler.hxx> +#include <xmloff/token/tokens.hxx> +#include <xmloff/xmltoken.hxx> + +using namespace std; +using namespace com::sun::star::uno; + +namespace xmloff { + +class TokenmapTest: public CppUnit::TestFixture +{ +public: + + TokenmapTest(); + + void test_roundTrip(); + void test_listEquality(); + + CPPUNIT_TEST_SUITE(TokenmapTest); + + CPPUNIT_TEST(test_roundTrip); + CPPUNIT_TEST(test_listEquality); + + CPPUNIT_TEST_SUITE_END(); + +private: + std::unique_ptr<token::TokenMap> pTokenMap; +}; + +TokenmapTest::TokenmapTest() : pTokenMap(new token::TokenMap) +{ +} + +void TokenmapTest::test_roundTrip() +{ + for ( sal_Int32 nToken = 0; nToken < XML_TOKEN_COUNT; ++nToken ) + { + // check that the getIdentifier <-> getToken roundtrip works + Sequence< sal_Int8 > rUtf8Name = pTokenMap->getUtf8TokenName(nToken); + CPPUNIT_ASSERT_MESSAGE("Token name sequence should not be empty", rUtf8Name.getLength()); + const char* pChar = reinterpret_cast< const char * >(rUtf8Name.getConstArray()); + CPPUNIT_ASSERT_MESSAGE("Token name sequence array pointer failed", pChar); + sal_Int32 ret = token::TokenMap::getTokenFromUTF8( pChar, rUtf8Name.getLength() ); + CPPUNIT_ASSERT_EQUAL_MESSAGE("No roundtrip for token", ret, nToken); + } +} + +void TokenmapTest::test_listEquality() +{ + //make sure 
the two token lists stay in sync + // This depends on same order in three places: XMLTokenEnum in include/xmloff/xmltoken.hxx, + // aTokenList in xmloff/source/core/xmltoken.cxx, and xmloff/source/token/tokens.txt + for ( sal_Int32 nToken = 0; nToken < XML_TOKEN_COUNT; ++nToken ) + { + Sequence< sal_Int8 > rUtf8Name = pTokenMap->getUtf8TokenName(nToken); + const OUString& rName = OUString( reinterpret_cast< const char* >( + rUtf8Name.getConstArray() ), rUtf8Name.getLength(), RTL_TEXTENCODING_UTF8 ); + if ( rName.endsWith("_DUMMY") ) + continue; + const OUString& rTokenName = GetXMLToken( static_cast<xmloff::token::XMLTokenEnum>(nToken) ); + CPPUNIT_ASSERT_EQUAL(rName, rTokenName); + } + + for ( sal_Int32 nToken = xmloff::token::XMLTokenEnum::XML_TOKEN_START + 1; + nToken < xmloff::token::XMLTokenEnum::XML_TOKEN_END; ++nToken ) + { + const OUString& rTokenName = GetXMLToken( static_cast<xmloff::token::XMLTokenEnum>(nToken) ); + Sequence< sal_Int8 > rUtf8Name = pTokenMap->getUtf8TokenName(nToken); + const OUString& rName = OUString( reinterpret_cast< const char* >( + rUtf8Name.getConstArray() ), rUtf8Name.getLength(), RTL_TEXTENCODING_UTF8 ); + if ( !rName.endsWith("_DUMMY") ) + CPPUNIT_ASSERT_EQUAL(rTokenName, rName); + } +} + +CPPUNIT_TEST_SUITE_REGISTRATION(TokenmapTest); + +} + |