Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 45 additions & 0 deletions .github/workflows/coverage.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
name: Test Coverage

on: [pull_request, workflow_dispatch]

# The report step posts/updates a PR comment (update-comment: true), which
# requires write access to pull requests when the repository's default
# GITHUB_TOKEN permissions are read-only.
# NOTE(review): confirm against this repo's Actions token settings.
permissions:
  contents: read
  pull-requests: write

jobs:
  build:
    name: Report Test Coverage
    runs-on: ubuntu-latest

    steps:
      # Submodules are needed so the CMake build can see vendored dependencies.
      - uses: actions/checkout@v4
        with:
          submodules: true

      - name: Create Build Environment
        run: cmake -E make_directory ${{github.workspace}}/build

      # Installs lcov/genhtml so the CMake coverage targets are enabled.
      - name: Setup LCOV
        uses: hrishikesh-kadam/setup-lcov@v1

      - name: Configure CMake
        shell: bash
        working-directory: ${{github.workspace}}/build
        run: cmake ..

      - name: Build
        working-directory: ${{github.workspace}}/build
        shell: bash
        run: cmake --build .

      # Runs the unit tests and produces build/cov.info.cleaned (see the
      # cov_data custom target in CMakeLists.txt).
      - name: Prepare coverage data
        working-directory: ${{github.workspace}}/build
        shell: bash
        run: cmake --build . --target cov_data

      - name: Report code coverage
        uses: zgosalvez/github-actions-report-lcov@v3
        with:
          coverage-files: build/cov.info.cleaned
          minimum-coverage: 30
          artifact-name: code-coverage-report
          github-token: ${{ secrets.GITHUB_TOKEN }}
          working-directory: ${{github.workspace}}
          update-comment: true

81 changes: 81 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -245,3 +245,84 @@ target_link_libraries(${UT_PROJECT_NAME} ${PROJECT_NAME})

add_test(NAME ${UT_PROJECT_NAME} COMMAND GameAnalyticsUnitTests)

# --------------------------- Code Coverage Setup --------------------------- #
# (Header previously said "Google Test Setup", but this section configures
# gcov/lcov instrumentation and the coverage report targets.)

# Locate the coverage toolchain. Each tool is optional; we warn and degrade
# gracefully instead of failing the configure step.
find_program(GCOV_PATH gcov)
if(NOT GCOV_PATH)
    message(WARNING "program gcov not found")
endif()

find_program(LCOV_PATH lcov)
if(NOT LCOV_PATH)
    message(WARNING "program lcov not found")
endif()

find_program(GENHTML_PATH genhtml)
if(NOT GENHTML_PATH)
    message(WARNING "program genhtml not found")
endif()

if(LCOV_PATH AND GCOV_PATH)

    # Instrument the library build: no optimization, debug info, and gcov
    # arc/test data emission. PRIVATE so consumers are not instrumented.
    target_compile_options(
        GameAnalytics
        PRIVATE
        -g -O0 -fprofile-arcs -ftest-coverage
    )

    # --coverage at link time pulls in libgcov. (On CMake >= 3.13,
    # target_link_options would be the more precise command for this.)
    target_link_libraries(
        GameAnalytics
        PRIVATE
        --coverage
    )

    set(covname cov)

    # cov_data: run the tests and produce ${covname}.info.cleaned for CI
    # upload (consumed by .github/workflows/coverage.yml).
    add_custom_target(cov_data
        # FIX: this command previously followed COMMENT without a COMMAND
        # keyword, so add_custom_target swallowed it as comment text and the
        # counters were never reset.
        COMMAND ${LCOV_PATH} --directory . --zerocounters

        # Run tests (a target name in COMMAND expands to the built executable).
        COMMAND GameAnalyticsUnitTests

        # Capture lcov counters and strip third-party/system sources.
        # FIX: shell single quotes replaced with CMake quoting + VERBATIM so
        # the glob patterns are passed to lcov verbatim on every generator.
        COMMAND ${LCOV_PATH} --directory . --capture --output-file ${covname}.info
        COMMAND ${LCOV_PATH} --remove ${covname}.info
                "${CMAKE_SOURCE_DIR}/source/dependencies/*"
                "${CMAKE_SOURCE_DIR}/test/*"
                "/usr/*"
                "/Applications/Xcode.app/*"
                --output-file ${covname}.info.cleaned

        COMMENT "Resetting code coverage counters to zero."
        VERBATIM
    )

    if(GENHTML_PATH)
        # cov: local HTML report in ${CMAKE_BINARY_DIR}/${covname}/index.html,
        # with branch coverage enabled.
        add_custom_target(cov
            # FIX: same missing-COMMAND bug as in cov_data above.
            COMMAND ${LCOV_PATH} --directory . --zerocounters

            # Run tests
            COMMAND GameAnalyticsUnitTests

            # Capture lcov counters and generate the HTML report.
            COMMAND ${LCOV_PATH} --directory . --capture --output-file ${covname}.info --rc lcov_branch_coverage=1 --rc derive_function_end_line=0
            COMMAND ${LCOV_PATH} --remove ${covname}.info
                    "${CMAKE_SOURCE_DIR}/source/dependencies/*"
                    "/usr/*"
                    --output-file ${covname}.info.cleaned
                    --rc lcov_branch_coverage=1
                    --rc derive_function_end_line=0
            COMMAND ${GENHTML_PATH} -o ${covname} ${covname}.info.cleaned --rc lcov_branch_coverage=1 --rc derive_function_end_line=0
            COMMAND ${CMAKE_COMMAND} -E remove ${covname}.info ${covname}.info.cleaned

            COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."
            VERBATIM
        )
    else()
        message(WARNING "unable to generate coverage report: missing genhtml")
    endif()

else()
    message(WARNING "unable to add coverage targets: missing coverage tools")
endif()

2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
def run_command(command, shell=True, cwd=None):
    """Run a shell command, raising CalledProcessError on non-zero exit.

    Args:
        command: Command string to execute.
        shell: Passed through to subprocess.run (defaults to True).
        cwd: Working directory for the command, or None for the current one.

    Returns:
        The subprocess.CompletedProcess for the finished command.
    """
    if os.name == 'nt':  # On Windows, route the command through PowerShell.
        command = f'powershell.exe -Command "{command}"'
    # FIX: pass cwd through to subprocess.run. The parameter was accepted but
    # ignored, so callers supplying cwd silently ran in the wrong directory.
    result = subprocess.run(command, shell=shell, check=True, text=True, cwd=cwd)
    return result

def main():
Expand Down
7 changes: 4 additions & 3 deletions source/gameanalytics/GAHealth.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ namespace gameanalytics
if((memory > 0) && (_totalMemory > 0))
{
int memoryPercent = std::round(static_cast<double>(memory) / static_cast<double>(_totalMemory) * 100.0);
return memoryPercent;
return std::min(memoryPercent, 100);
}

return -1;
return 0;

}

void GAHealth::doAppMemoryReading(int64_t memory)
Expand Down Expand Up @@ -172,4 +173,4 @@ namespace gameanalytics

}
}
}
}
209 changes: 209 additions & 0 deletions test/GAHealth_test.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,209 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include <GAHealth.h>
#include <nlohmann/json.hpp>

using namespace gameanalytics;
using ::testing::Return;

namespace gameanalytics
{
    // Google Mock stand-in for GAPlatform, letting GAHealth be exercised
    // without touching real OS APIs. The constructor installs ON_CALL
    // defaults so uninteresting calls return plausible values rather than
    // empty/zero ones.
    class MockGAPlatform : public GAPlatform
    {
    public:
        // Pure virtual platform queries
        MOCK_METHOD(std::string, getOSVersion, (), (override));
        MOCK_METHOD(std::string, getDeviceManufacturer, (), (override));
        MOCK_METHOD(std::string, getBuildPlatform, (), (override));
        MOCK_METHOD(std::string, getPersistentPath, (), (override));
        MOCK_METHOD(std::string, getDeviceModel, (), (override));
        MOCK_METHOD(std::string, getConnectionType, (), (override));

        // Non-pure virtual hooks
        MOCK_METHOD(std::string, getAdvertisingId, (), (override));
        MOCK_METHOD(std::string, getDeviceId, (), (override));
        MOCK_METHOD(void, setupUncaughtExceptionHandler, (), (override));
        MOCK_METHOD(void, onInit, (), (override));

        // Const hardware/metric accessors
        MOCK_METHOD(std::string, getCpuModel, (), (const, override));
        MOCK_METHOD(std::string, getGpuModel, (), (const, override));
        MOCK_METHOD(int, getNumCpuCores, (), (const, override));
        MOCK_METHOD(int64_t, getTotalDeviceMemory, (), (const, override));
        MOCK_METHOD(int64_t, getAppMemoryUsage, (), (const, override));
        MOCK_METHOD(int64_t, getSysMemoryUsage, (), (const, override));
        MOCK_METHOD(int64_t, getBootTime, (), (const, override));

        MockGAPlatform()
        {
            // Hardware/metric defaults
            ON_CALL(*this, getCpuModel).WillByDefault(Return("Intel Core i7"));
            ON_CALL(*this, getGpuModel).WillByDefault(Return("Nvidia GTX 1080"));
            ON_CALL(*this, getNumCpuCores).WillByDefault(Return(8));
            ON_CALL(*this, getTotalDeviceMemory).WillByDefault(Return(16384)); // 16GB
            ON_CALL(*this, getAppMemoryUsage).WillByDefault(Return(1024));     // 1GB
            ON_CALL(*this, getSysMemoryUsage).WillByDefault(Return(2048));     // 2GB
            ON_CALL(*this, getBootTime).WillByDefault(Return(30000));          // 30 seconds

            // Device/platform identity defaults
            ON_CALL(*this, getOSVersion).WillByDefault(Return("10.0"));
            ON_CALL(*this, getDeviceManufacturer).WillByDefault(Return("GenericManufacturer"));
            ON_CALL(*this, getBuildPlatform).WillByDefault(Return("Windows"));
            ON_CALL(*this, getPersistentPath).WillByDefault(Return("/persistent/path"));
            ON_CALL(*this, getDeviceModel).WillByDefault(Return("DeviceModelX"));
            ON_CALL(*this, getConnectionType).WillByDefault(Return("WiFi"));
            ON_CALL(*this, getAdvertisingId).WillByDefault(Return("ad-id-123"));
            ON_CALL(*this, getDeviceId).WillByDefault(Return("device-id-456"));

            // Void hooks: default to doing nothing
            ON_CALL(*this, setupUncaughtExceptionHandler).WillByDefault(Return());
            ON_CALL(*this, onInit).WillByDefault(Return());
        }
    };
}


// Test subclass to access protected members
// White-box shim: re-publishes protected GAHealth members so the tests
// below can seed and inspect internal state directly.
class GAHealthTestable : public gameanalytics::GAHealth
{
public:
    // Reuse the base-class constructor unchanged.
    using gameanalytics::GAHealth::GAHealth;

    // Internal state opened up for assertions.
    using gameanalytics::GAHealth::_totalMemory;
    using gameanalytics::GAHealth::_fpsReadings;
    using gameanalytics::GAHealth::_appMemoryUsage;
    using gameanalytics::GAHealth::_sysMemoryUsage;

    // Protected helper under test.
    using gameanalytics::GAHealth::getMemoryPercent;
};


// Shared fixture: each test gets a fresh mock platform and a fresh
// GAHealth instance wired to it.
// NOTE(review): assumes GAHealth does NOT take ownership of the platform
// pointer — confirm, otherwise TearDown double-frees it.
class GAHealthTest : public ::testing::Test
{
protected:
    MockGAPlatform* mockPlatform = nullptr;
    GAHealthTestable* gaHealth = nullptr;

    void SetUp() override
    {
        mockPlatform = new MockGAPlatform();
        gaHealth = new GAHealthTestable(mockPlatform);
    }

    void TearDown() override
    {
        // Tear down in reverse construction order.
        delete gaHealth;
        delete mockPlatform;
        gaHealth = nullptr;
        mockPlatform = nullptr;
    }
};

// Constructing GAHealth should query the platform once per hardware field
// and surface those values through addHealthAnnotations.
TEST_F(GAHealthTest, ConstructorInitializesPlatform)
{
    EXPECT_CALL(*mockPlatform, getCpuModel()).WillOnce(Return("Intel"));
    EXPECT_CALL(*mockPlatform, getNumCpuCores()).WillOnce(Return(4));
    EXPECT_CALL(*mockPlatform, getDeviceModel()).WillOnce(Return("Device123"));
    EXPECT_CALL(*mockPlatform, getGpuModel()).WillOnce(Return("Nvidia"));
    EXPECT_CALL(*mockPlatform, getTotalDeviceMemory()).WillOnce(Return(8192));

    gameanalytics::GAHealth health(mockPlatform);
    health.enableHardwareTracking = true;

    json out;
    health.addHealthAnnotations(out);

    // FIX: removed the std::cout/std::setw debug dump — it spammed test
    // output and depended on <iomanip>, which this file never includes.
    EXPECT_EQ(out["cpu_model"], "Intel");
    EXPECT_EQ(out["cpu_num_cores"], 4);
    EXPECT_EQ(out["hardware"], "Device123");
}

// addHealthAnnotations should include hardware fields when
// enableHardwareTracking is set.
TEST_F(GAHealthTest, AddHealthAnnotationsIncludesHardwareTracking)
{
    EXPECT_CALL(*mockPlatform, getCpuModel()).WillOnce(Return("Intel"));
    EXPECT_CALL(*mockPlatform, getNumCpuCores()).WillOnce(Return(4));
    EXPECT_CALL(*mockPlatform, getDeviceModel()).WillOnce(Return("Device123"));

    // FIX: was heap-allocated with `new` and never deleted, leaking on every
    // run; a stack-allocated instance gives identical coverage safely.
    GAHealthTestable localHealthTracker(mockPlatform);
    localHealthTracker.enableHardwareTracking = true;

    json healthEvent;
    localHealthTracker.addHealthAnnotations(healthEvent);

    // FIX: dropped the std::cout debug print (noisy; relied on <iomanip>).
    EXPECT_EQ(healthEvent["cpu_model"], "Intel");
    EXPECT_EQ(healthEvent["cpu_num_cores"], 4);
    EXPECT_EQ(healthEvent["hardware"], "Device123");
}

// A 60 fps reading should land in (and increment) readings bucket 60.
// NOTE(review): assumes buckets are indexed by integer fps — confirm.
TEST_F(GAHealthTest, DoFpsReadingIncrementsBucketCorrectly)
{
    gaHealth->doFpsReading(60.0f);

    EXPECT_EQ(gaHealth->_fpsReadings[60], 1);
}

// getMemoryPercent should report usage relative to total device memory.
TEST_F(GAHealthTest, GetMemoryPercentReturnsCorrectValue)
{
    // FIX: pin the total explicitly instead of depending on the mock's
    // ON_CALL default (16384) — the test no longer breaks if that default
    // changes.
    gaHealth->_totalMemory = 16384;

    EXPECT_EQ(gaHealth->getMemoryPercent(4096), 25); // 4096 / 16384 = 25%
}

// Test getMemoryPercent with various inputs
// Exercises getMemoryPercent across normal, boundary, and invalid inputs
// against a fixed total of 1000.
TEST_F(GAHealthTest, GetMemoryPercentReturnsCorrectValues)
{
    gaHealth->_totalMemory = 1000;

    struct Case
    {
        int memory;
        int expected;
        const char* label;
    };

    const Case cases[] = {
        { 500,  50,  "half of total"                     },
        { 1000, 100, "exactly total"                     },
        { 1,    0,   "tiny usage rounds down to zero"    },
        { 2000, 100, "above total is capped at 100"      },
        { -100, 0,   "negative input handled as zero"    },
    };

    for (const Case& c : cases)
    {
        EXPECT_EQ(gaHealth->getMemoryPercent(c.memory), c.expected) << c.label;
    }
}

// With FPS tracking enabled, addPerformanceData should serialize the raw
// readings table verbatim under "fps_data_table".
TEST_F(GAHealthTest, AddPerformanceDataIncludesFPSTracking)
{
    gaHealth->enableFPSTracking = true;

    // Seed a couple of buckets so the emitted table is non-trivial.
    gaHealth->_fpsReadings[60] = 5;
    gaHealth->_fpsReadings[30] = 2;

    json performanceData;
    gaHealth->addPerformanceData(performanceData);

    // Build the expected payload from the same readings and compare.
    json expected;
    expected["fps_data_table"] = gaHealth->_fpsReadings;

    EXPECT_EQ(performanceData["fps_data_table"], expected["fps_data_table"]);
}

// With boot-time tracking enabled, addSDKInitData should copy the platform's
// boot time into the "app_boot_time" field.
TEST_F(GAHealthTest, AddSDKInitDataIncludesBootTime)
{
    constexpr int64_t kBootTimeMs = 5000;

    gaHealth->enableAppBootTimeTracking = true;
    EXPECT_CALL(*mockPlatform, getBootTime()).WillOnce(Return(kBootTimeMs));

    json initEvent;
    gaHealth->addSDKInitData(initEvent);

    EXPECT_EQ(initEvent["app_boot_time"], kBootTimeMs);
}