diff --git a/app/build.gradle.kts b/app/build.gradle.kts
index afaa56012..c23215bb4 100644
--- a/app/build.gradle.kts
+++ b/app/build.gradle.kts
@@ -9,6 +9,7 @@ plugins {
alias(libs.plugins.jetbrains.serialization)
alias(libs.plugins.kotlinter)
alias(libs.plugins.ksp)
+ id("com.chaquo.python") version "15.0.1"
}
val keystorePropertiesFile = rootProject.file("app/keystores/keystore.properties")
@@ -76,8 +77,8 @@ android {
useSupportLibrary = true
}
+ // Restore the original ProGuard configuration
proguardFiles(
- // getDefaultProguardFile("proguard-android-optimize.txt"),
getDefaultProguardFile("proguard-android.txt"),
"proguard-rules.pro",
)
@@ -170,8 +171,30 @@ android {
// }
}
+chaquopy {
+ defaultConfig {
+ version = "3.11"
+ pip {
+ // Install GOGDL dependencies
+ install("requests")
+ // Use your Android-compatible fork instead of the original
+ // install("git+https://github.com/unbelievableflavour/heroic-gogdl-android.git@0.0.4")
+ }
+ }
+ sourceSets {
+ getByName("main") {
+ // Remove local Python source directory since we're using the external package
+ srcDir("src/main/python")
+ }
+ }
+}
+
dependencies {
implementation(libs.material)
+
+ // Chrome Custom Tabs for OAuth
+ implementation("androidx.browser:browser:1.8.0")
+
// JavaSteam
val localBuild = false // Change to 'true' needed when building JavaSteam manually
if (localBuild) {
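
The new androidx.browser dependency above backs the Chrome Custom Tabs OAuth login used by the GOG code later in this patch. Below is a minimal, hypothetical sketch of opening GOG's authorization page in a Custom Tab; the client id matches the one hard-coded in GOGService, while the remaining URL parameters, the function name, and the redirect handling are assumptions rather than part of this diff.

    // Sketch only: open the GOG OAuth page in a Chrome Custom Tab. Catching the redirect
    // carrying the ?code=... parameter (later passed to GOGService.authenticateWithCode)
    // is assumed to happen elsewhere, e.g. via the AndroidManifest changes in this patch.
    import android.content.Context
    import androidx.browser.customtabs.CustomTabsIntent
    import androidx.core.net.toUri

    fun launchGogLogin(context: Context) {
        val authUri = "https://auth.gog.com/auth".toUri().buildUpon()
            .appendQueryParameter("client_id", "46899977096215655")
            .appendQueryParameter("redirect_uri", "https://embed.gog.com/on_login_success?origin=client")
            .appendQueryParameter("response_type", "code")
            .appendQueryParameter("layout", "client2")
            .build()
        CustomTabsIntent.Builder()
            .setShowTitle(true)
            .build()
            .launchUrl(context, authUri)
    }
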
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index ee6c72c5c..92331f3f4 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -59,11 +59,22 @@
+
+
+
+
diff --git a/app/src/main/java/app/gamenative/MainActivity.kt b/app/src/main/java/app/gamenative/MainActivity.kt
index 5f19eace2..01b1866d7 100644
--- a/app/src/main/java/app/gamenative/MainActivity.kt
+++ b/app/src/main/java/app/gamenative/MainActivity.kt
@@ -30,6 +30,7 @@ import coil.memory.MemoryCache
import coil.request.CachePolicy
import app.gamenative.events.AndroidEvent
import app.gamenative.service.SteamService
+import app.gamenative.service.GOG.GOGService
import app.gamenative.ui.PluviaMain
import app.gamenative.ui.enums.Orientation
import app.gamenative.utils.AnimatedPngDecoder
@@ -223,6 +224,11 @@ class MainActivity : ComponentActivity() {
Timber.i("Stopping Steam Service")
SteamService.stop()
}
+
+ if (GOGService.isRunning && !isChangingConfigurations) {
+ Timber.i("Stopping GOG Service")
+ GOGService.stop()
+ }
}
override fun onResume() {
@@ -254,6 +260,15 @@ class MainActivity : ComponentActivity() {
Timber.i("Stopping SteamService - no active operations")
SteamService.stop()
}
+
+ // stop GOGService only if no downloads or sync are in progress
+ if (!isChangingConfigurations &&
+ GOGService.isRunning &&
+ !GOGService.hasActiveOperations()
+ ) {
+ Timber.i("Stopping GOGService - no active operations")
+ GOGService.stop()
+ }
}
// override fun onKeyDown(keyCode: Int, event: KeyEvent?): Boolean {
diff --git a/app/src/main/java/app/gamenative/PluviaApp.kt b/app/src/main/java/app/gamenative/PluviaApp.kt
index 9ba7dc87d..53772042f 100644
--- a/app/src/main/java/app/gamenative/PluviaApp.kt
+++ b/app/src/main/java/app/gamenative/PluviaApp.kt
@@ -4,6 +4,7 @@ import android.os.StrictMode
import androidx.navigation.NavController
import app.gamenative.events.EventDispatcher
import app.gamenative.service.DownloadService
+import app.gamenative.service.GOG.GOGService
import app.gamenative.service.GameManagerService
import app.gamenative.utils.IntentLaunchManager
import com.google.android.play.core.splitcompat.SplitCompatApplication
@@ -15,6 +16,8 @@ import com.winlator.widget.XServerView
import com.winlator.xenvironment.XEnvironment
import dagger.hilt.android.HiltAndroidApp
import timber.log.Timber
+import okhttp3.OkHttpClient
+import javax.inject.Inject
// Add PostHog imports
import com.posthog.android.PostHogAndroid
@@ -33,6 +36,9 @@ typealias NavChangedListener = NavController.OnDestinationChangedListener
@HiltAndroidApp
class PluviaApp : SplitCompatApplication() {
+ @Inject
+ lateinit var httpClient: OkHttpClient
+
override fun onCreate() {
super.onCreate()
@@ -92,6 +98,15 @@ class PluviaApp : SplitCompatApplication() {
} catch (e: Exception) {
Timber.e(e, "[PluviaApp]: Failed to initialize GameManagerService")
}
+
+ // Initialize GOG Service
+ try {
+ GOGService.initialize(this)
+ GOGService.setHttpClient(httpClient)
+ Timber.i("[PluviaApp]: GOG Service initialized successfully")
+ } catch (e: Exception) {
+ Timber.e(e, "[PluviaApp]: Failed to initialize GOG Service")
+ }
}
companion object {
diff --git a/app/src/main/java/app/gamenative/data/DownloadInfo.kt b/app/src/main/java/app/gamenative/data/DownloadInfo.kt
index cab140afe..95423db20 100644
--- a/app/src/main/java/app/gamenative/data/DownloadInfo.kt
+++ b/app/src/main/java/app/gamenative/data/DownloadInfo.kt
@@ -7,19 +7,31 @@ data class DownloadInfo(
val jobCount: Int = 1,
) {
private var downloadJob: Job? = null
+ private var progressMonitorJob: Job? = null
private val downloadProgressListeners = mutableListOf<((Float) -> Unit)>()
private val progresses: Array<Float> = Array(jobCount) { 0f }
private val weights = FloatArray(jobCount) { 1f } // ⇐ new
private var weightSum = jobCount.toFloat()
+
+ @Volatile
+ private var isCancelled = false
fun cancel() {
+ isCancelled = true
downloadJob?.cancel(CancellationException("Cancelled by user"))
+ progressMonitorJob?.cancel(CancellationException("Progress monitoring cancelled by user"))
}
+
+ fun isCancelled(): Boolean = isCancelled
fun setDownloadJob(job: Job) {
downloadJob = job
}
+
+ fun setProgressMonitorJob(job: Job) {
+ progressMonitorJob = job
+ }
fun getProgress(): Float {
var total = 0f
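
The DownloadInfo additions above introduce a dedicated progress-monitor job alongside the download job, plus a volatile cancellation flag that cancel() flips before cancelling both jobs. A hypothetical caller-side sketch of how these are meant to be wired (the function and its body are illustrative, not taken from this patch):

    import kotlinx.coroutines.CoroutineScope
    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.delay
    import kotlinx.coroutines.isActive
    import kotlinx.coroutines.launch

    fun startDownload(scope: CoroutineScope, info: DownloadInfo) {
        // One coroutine performs the actual download...
        info.setDownloadJob(
            scope.launch(Dispatchers.IO) {
                // run the gogdl "download" command here
            },
        )
        // ...while a second one polls progress until the work finishes or is cancelled.
        info.setProgressMonitorJob(
            scope.launch(Dispatchers.IO) {
                while (isActive && !info.isCancelled()) {
                    // read gogdl's progress output and forward it to the registered listeners
                    delay(1_000)
                }
            },
        )
    }
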
diff --git a/app/src/main/java/app/gamenative/data/GOGGame.kt b/app/src/main/java/app/gamenative/data/GOGGame.kt
new file mode 100644
index 000000000..b8e9daa8b
--- /dev/null
+++ b/app/src/main/java/app/gamenative/data/GOGGame.kt
@@ -0,0 +1,43 @@
+package app.gamenative.data
+
+import androidx.room.Entity
+import androidx.room.PrimaryKey
+
+@Entity(tableName = "gog_games")
+data class GOGGame(
+ @PrimaryKey
+ val id: String,
+ val title: String,
+ val slug: String,
+ val downloadSize: Long = 0,
+ val installSize: Long = 0,
+ val isInstalled: Boolean = false,
+ val installPath: String = "",
+ val imageUrl: String = "",
+ val iconUrl: String = "",
+ val description: String = "",
+ val releaseDate: String = "",
+ val developer: String = "",
+ val publisher: String = "",
+ val genres: List<String> = emptyList(),
+ val languages: List<String> = emptyList(),
+ val lastPlayed: Long = 0,
+ val playTime: Long = 0,
+)
+
+data class GOGCredentials(
+ val accessToken: String,
+ val refreshToken: String,
+ val userId: String,
+ val username: String,
+)
+
+data class GOGDownloadInfo(
+ val gameId: String,
+ val totalSize: Long,
+ val downloadedSize: Long = 0,
+ val progress: Float = 0f,
+ val isActive: Boolean = false,
+ val isPaused: Boolean = false,
+ val error: String? = null,
+)
diff --git a/app/src/main/java/app/gamenative/data/GameSource.kt b/app/src/main/java/app/gamenative/data/GameSource.kt
index bcfda5b88..35bcd2cda 100644
--- a/app/src/main/java/app/gamenative/data/GameSource.kt
+++ b/app/src/main/java/app/gamenative/data/GameSource.kt
@@ -2,5 +2,6 @@ package app.gamenative.data
enum class GameSource {
STEAM,
+ GOG,
// Add new game sources here
}
diff --git a/app/src/main/java/app/gamenative/data/LibraryItem.kt b/app/src/main/java/app/gamenative/data/LibraryItem.kt
index 3ffc7a7cc..e714c153c 100644
--- a/app/src/main/java/app/gamenative/data/LibraryItem.kt
+++ b/app/src/main/java/app/gamenative/data/LibraryItem.kt
@@ -1,6 +1,7 @@
package app.gamenative.data
import app.gamenative.Constants
+import app.gamenative.service.GameManagerService
/**
* Data class for the Library list
@@ -14,7 +15,7 @@ data class LibraryItem(
val gameSource: GameSource = GameSource.STEAM,
) {
val clientIconUrl: String
- get() = Constants.Library.ICON_URL + "$gameId/$iconHash.ico"
+ get() = GameManagerService.getIconImage(this)
/**
* Helper property to get the game ID as an integer
diff --git a/app/src/main/java/app/gamenative/db/PluviaDatabase.kt b/app/src/main/java/app/gamenative/db/PluviaDatabase.kt
index 4d201f557..874cd7165 100644
--- a/app/src/main/java/app/gamenative/db/PluviaDatabase.kt
+++ b/app/src/main/java/app/gamenative/db/PluviaDatabase.kt
@@ -7,12 +7,14 @@ import app.gamenative.data.ChangeNumbers
import app.gamenative.data.Emoticon
import app.gamenative.data.FileChangeLists
import app.gamenative.data.FriendMessage
+import app.gamenative.data.GOGGame
import app.gamenative.data.SteamApp
import app.gamenative.data.SteamFriend
import app.gamenative.data.SteamLicense
import app.gamenative.db.converters.AppConverter
import app.gamenative.db.converters.ByteArrayConverter
import app.gamenative.db.converters.FriendConverter
+import app.gamenative.db.converters.GOGConverter
import app.gamenative.db.converters.LicenseConverter
import app.gamenative.db.converters.PathTypeConverter
import app.gamenative.db.converters.UserFileInfoListConverter
@@ -20,6 +22,7 @@ import app.gamenative.db.dao.ChangeNumbersDao
import app.gamenative.db.dao.EmoticonDao
import app.gamenative.db.dao.FileChangeListsDao
import app.gamenative.db.dao.FriendMessagesDao
+import app.gamenative.db.dao.GOGGameDao
import app.gamenative.db.dao.SteamAppDao
import app.gamenative.db.dao.SteamFriendDao
import app.gamenative.db.dao.SteamLicenseDao
@@ -35,14 +38,16 @@ const val DATABASE_NAME = "pluvia.db"
FileChangeLists::class,
FriendMessage::class,
Emoticon::class,
+ GOGGame::class,
],
- version = 3,
+ version = 4, // Increment version for new entity
exportSchema = false, // Should export once stable.
)
@TypeConverters(
AppConverter::class,
ByteArrayConverter::class,
FriendConverter::class,
+ GOGConverter::class,
LicenseConverter::class,
PathTypeConverter::class,
UserFileInfoListConverter::class,
@@ -62,4 +67,6 @@ abstract class PluviaDatabase : RoomDatabase() {
abstract fun friendMessagesDao(): FriendMessagesDao
abstract fun emoticonDao(): EmoticonDao
+
+ abstract fun gogGameDao(): GOGGameDao
}
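
The schema version moves from 3 to 4 for the new gog_games entity while exportSchema remains false. If the database builder does not already fall back to destructive migration, a manual migration roughly like the following would be needed; the SQL is inferred from the GOGGame entity (the two list fields are persisted as TEXT via GOGConverter) and is a sketch, not a verified Room-exported schema.

    import androidx.room.migration.Migration
    import androidx.sqlite.db.SupportSQLiteDatabase

    val MIGRATION_3_4 = object : Migration(3, 4) {
        override fun migrate(db: SupportSQLiteDatabase) {
            db.execSQL(
                """
                CREATE TABLE IF NOT EXISTS `gog_games` (
                    `id` TEXT NOT NULL, `title` TEXT NOT NULL, `slug` TEXT NOT NULL,
                    `downloadSize` INTEGER NOT NULL, `installSize` INTEGER NOT NULL,
                    `isInstalled` INTEGER NOT NULL, `installPath` TEXT NOT NULL,
                    `imageUrl` TEXT NOT NULL, `iconUrl` TEXT NOT NULL,
                    `description` TEXT NOT NULL, `releaseDate` TEXT NOT NULL,
                    `developer` TEXT NOT NULL, `publisher` TEXT NOT NULL,
                    `genres` TEXT NOT NULL, `languages` TEXT NOT NULL,
                    `lastPlayed` INTEGER NOT NULL, `playTime` INTEGER NOT NULL,
                    PRIMARY KEY(`id`)
                )
                """.trimIndent(),
            )
        }
    }
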
diff --git a/app/src/main/java/app/gamenative/db/converters/GOGConverter.kt b/app/src/main/java/app/gamenative/db/converters/GOGConverter.kt
new file mode 100644
index 000000000..c68901338
--- /dev/null
+++ b/app/src/main/java/app/gamenative/db/converters/GOGConverter.kt
@@ -0,0 +1,21 @@
+package app.gamenative.db.converters
+
+import androidx.room.TypeConverter
+import kotlinx.serialization.decodeFromString
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+
+class GOGConverter {
+
+ @TypeConverter
+ fun fromStringList(value: List<String>): String {
+ return Json.encodeToString(value)
+ }
+
+ @TypeConverter
+ fun toStringList(value: String): List<String> {
+ return try {
+ Json.decodeFromString<List<String>>(value)
+ } catch (e: Exception) {
+ emptyList()
+ }
+ }
+}
diff --git a/app/src/main/java/app/gamenative/db/dao/GOGGameDao.kt b/app/src/main/java/app/gamenative/db/dao/GOGGameDao.kt
new file mode 100644
index 000000000..dff5f5625
--- /dev/null
+++ b/app/src/main/java/app/gamenative/db/dao/GOGGameDao.kt
@@ -0,0 +1,75 @@
+package app.gamenative.db.dao
+
+import androidx.room.Dao
+import androidx.room.Delete
+import androidx.room.Insert
+import androidx.room.OnConflictStrategy
+import androidx.room.Query
+import androidx.room.Transaction
+import androidx.room.Update
+import app.gamenative.data.GOGGame
+import kotlinx.coroutines.flow.Flow
+
+@Dao
+interface GOGGameDao {
+
+ @Insert(onConflict = OnConflictStrategy.REPLACE)
+ suspend fun insert(game: GOGGame)
+
+ @Insert(onConflict = OnConflictStrategy.REPLACE)
+ suspend fun insertAll(games: List<GOGGame>)
+
+ @Update
+ suspend fun update(game: GOGGame)
+
+ @Delete
+ suspend fun delete(game: GOGGame)
+
+ @Query("DELETE FROM gog_games WHERE id = :gameId")
+ suspend fun deleteById(gameId: String)
+
+ @Query("SELECT * FROM gog_games WHERE id = :gameId")
+ suspend fun getById(gameId: String): GOGGame?
+
+ @Query("SELECT * FROM gog_games ORDER BY title ASC")
+ fun getAll(): Flow<List<GOGGame>>
+
+ @Query("SELECT * FROM gog_games ORDER BY title ASC")
+ suspend fun getAllAsList(): List<GOGGame>
+
+ @Query("SELECT * FROM gog_games WHERE isInstalled = :isInstalled ORDER BY title ASC")
+ fun getByInstallStatus(isInstalled: Boolean): Flow<List<GOGGame>>
+
+ @Query("SELECT * FROM gog_games WHERE title LIKE '%' || :searchQuery || '%' ORDER BY title ASC")
+ fun searchByTitle(searchQuery: String): Flow<List<GOGGame>>
+
+ @Query("DELETE FROM gog_games")
+ suspend fun deleteAll()
+
+ @Query("SELECT COUNT(*) FROM gog_games")
+ fun getCount(): Flow<Int>
+
+ @Transaction
+ suspend fun replaceAll(games: List<GOGGame>) {
+ deleteAll()
+ insertAll(games)
+ }
+
+ @Transaction
+ suspend fun upsertPreservingInstallStatus(games: List<GOGGame>) {
+ games.forEach { newGame ->
+ val existingGame = getById(newGame.id)
+ if (existingGame != null) {
+ // Preserve installation status and path from existing game
+ val gameToInsert = newGame.copy(
+ isInstalled = existingGame.isInstalled,
+ installPath = existingGame.installPath,
+ )
+ insert(gameToInsert)
+ } else {
+ // New game, insert as-is
+ insert(newGame)
+ }
+ }
+ }
+}
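
A short sketch of consuming the Flow-returning queries above; the ViewModel is hypothetical and only illustrates that Room keeps the emitted list in sync as rows change.

    import androidx.lifecycle.ViewModel
    import androidx.lifecycle.viewModelScope
    import app.gamenative.data.GOGGame
    import app.gamenative.db.dao.GOGGameDao
    import dagger.hilt.android.lifecycle.HiltViewModel
    import javax.inject.Inject
    import kotlinx.coroutines.flow.SharingStarted
    import kotlinx.coroutines.flow.StateFlow
    import kotlinx.coroutines.flow.stateIn

    @HiltViewModel
    class GOGLibraryViewModel @Inject constructor(
        gogGameDao: GOGGameDao,
    ) : ViewModel() {
        // Re-emits automatically whenever the gog_games table changes.
        val installedGames: StateFlow<List<GOGGame>> =
            gogGameDao.getByInstallStatus(isInstalled = true)
                .stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), emptyList())
    }
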
diff --git a/app/src/main/java/app/gamenative/di/DatabaseModule.kt b/app/src/main/java/app/gamenative/di/DatabaseModule.kt
index c6f890e29..63331ee44 100644
--- a/app/src/main/java/app/gamenative/di/DatabaseModule.kt
+++ b/app/src/main/java/app/gamenative/di/DatabaseModule.kt
@@ -52,4 +52,8 @@ class DatabaseModule {
@Provides
@Singleton
fun provideEmoticonDao(db: PluviaDatabase) = db.emoticonDao()
+
+ @Provides
+ @Singleton
+ fun provideGOGGameDao(db: PluviaDatabase) = db.gogGameDao()
}
diff --git a/app/src/main/java/app/gamenative/di/NetworkModule.kt b/app/src/main/java/app/gamenative/di/NetworkModule.kt
new file mode 100644
index 000000000..e14644d5b
--- /dev/null
+++ b/app/src/main/java/app/gamenative/di/NetworkModule.kt
@@ -0,0 +1,24 @@
+package app.gamenative.di
+
+import dagger.Module
+import dagger.Provides
+import dagger.hilt.InstallIn
+import dagger.hilt.components.SingletonComponent
+import java.util.concurrent.TimeUnit
+import javax.inject.Singleton
+import okhttp3.OkHttpClient
+
+@InstallIn(SingletonComponent::class)
+@Module
+class NetworkModule {
+
+ @Provides
+ @Singleton
+ fun provideOkHttpClient(): OkHttpClient {
+ return OkHttpClient.Builder()
+ .connectTimeout(30, TimeUnit.SECONDS)
+ .readTimeout(30, TimeUnit.SECONDS)
+ .writeTimeout(30, TimeUnit.SECONDS)
+ .build()
+ }
+}
diff --git a/app/src/main/java/app/gamenative/enums/Marker.kt b/app/src/main/java/app/gamenative/enums/Marker.kt
index 4a2140f7b..bbd7388f6 100644
--- a/app/src/main/java/app/gamenative/enums/Marker.kt
+++ b/app/src/main/java/app/gamenative/enums/Marker.kt
@@ -2,6 +2,7 @@ package app.gamenative.enums
enum class Marker(val fileName: String ) {
DOWNLOAD_COMPLETE_MARKER(".download_complete"),
+ DOWNLOAD_IN_PROGRESS_MARKER(".download_in_progress"),
STEAM_DLL_REPLACED(".steam_dll_replaced"),
STEAM_DLL_RESTORED(".steam_dll_restored"),
}
diff --git a/app/src/main/java/app/gamenative/service/GOG/GOGConstants.kt b/app/src/main/java/app/gamenative/service/GOG/GOGConstants.kt
new file mode 100644
index 000000000..693ec0d49
--- /dev/null
+++ b/app/src/main/java/app/gamenative/service/GOG/GOGConstants.kt
@@ -0,0 +1,25 @@
+package app.gamenative.service.GOG
+
+/**
+ * Constants for GOG game service
+ */
+object GOGConstants {
+ /**
+ * Base storage path for GOG games
+ * This path must match the E: drive mount in Winlator: /data/data/app.gamenative/storage
+ */
+ const val GOG_GAMES_BASE_PATH = "/data/data/app.gamenative/storage/gog_games"
+
+ /**
+ * Default directory name for GOG game installations
+ */
+ const val GOG_GAME_DIR_PREFIX = "game_"
+
+ /**
+ * Get the full path for a GOG game installation
+ */
+ fun getGameInstallPath(gameTitle: String): String {
+ val sanitizedTitle = gameTitle.replace(Regex("[^a-zA-Z0-9\\s-_]"), "").trim()
+ return "$GOG_GAMES_BASE_PATH/$sanitizedTitle"
+ }
+}
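
For illustration, the sanitization in getGameInstallPath strips everything outside letters, digits, whitespace, '-' and '_' before building the directory name:

    // e.g. punctuation in a title is dropped from the install path
    val path = GOGConstants.getGameInstallPath("The Witcher 3: Wild Hunt")
    // path == "/data/data/app.gamenative/storage/gog_games/The Witcher 3 Wild Hunt"
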
diff --git a/app/src/main/java/app/gamenative/service/GOG/GOGGameManager.kt b/app/src/main/java/app/gamenative/service/GOG/GOGGameManager.kt
new file mode 100644
index 000000000..1015b7b56
--- /dev/null
+++ b/app/src/main/java/app/gamenative/service/GOG/GOGGameManager.kt
@@ -0,0 +1,734 @@
+package app.gamenative.service.GOG
+
+import android.content.Context
+import android.net.Uri
+import androidx.core.net.toUri
+import app.gamenative.R
+import app.gamenative.data.DownloadInfo
+import app.gamenative.data.GOGGame
+import app.gamenative.data.GOGGameWrapper
+import app.gamenative.data.Game
+import app.gamenative.data.GameSource
+import app.gamenative.data.LaunchInfo
+import app.gamenative.data.LibraryItem
+import app.gamenative.data.PostSyncInfo
+import app.gamenative.data.SteamApp
+import app.gamenative.db.dao.GOGGameDao
+import app.gamenative.enums.AppType
+import app.gamenative.enums.ControllerSupport
+import app.gamenative.enums.Marker
+import app.gamenative.enums.OS
+import app.gamenative.enums.ReleaseState
+import app.gamenative.enums.SyncResult
+import app.gamenative.service.GameManager
+import app.gamenative.ui.component.dialog.state.MessageDialogState
+import app.gamenative.ui.enums.DialogType
+import app.gamenative.utils.ContainerUtils
+import app.gamenative.utils.MarkerUtils
+import app.gamenative.utils.StorageUtils
+import com.winlator.container.Container
+import com.winlator.core.envvars.EnvVars
+import com.winlator.xenvironment.components.GuestProgramLauncherComponent
+import java.io.File
+import java.text.SimpleDateFormat
+import java.util.Date
+import java.util.EnumSet
+import java.util.Locale
+import javax.inject.Inject
+import javax.inject.Singleton
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.runBlocking
+import kotlinx.coroutines.withContext
+import timber.log.Timber
+
+
+@Singleton
+class GOGGameManager @Inject constructor(
+ private val gogGameDao: GOGGameDao,
+) : GameManager {
+
+
+ override fun downloadGame(context: Context, libraryItem: LibraryItem): Result<DownloadInfo?> {
+ try {
+ // Check if another download is already in progress
+ if (GOGService.hasActiveDownload()) {
+ return Result.failure(Exception("Another GOG game is already downloading. Please wait for it to finish before starting a new download."))
+ }
+
+ // Check authentication first
+ if (!GOGService.hasStoredCredentials(context)) {
+ return Result.failure(Exception("GOG authentication required. Please log in to your GOG account first."))
+ }
+
+ // Validate credentials and refresh if needed
+ val validationResult = runBlocking { GOGService.validateCredentials(context) }
+ if (!validationResult.isSuccess || !validationResult.getOrDefault(false)) {
+ return Result.failure(Exception("GOG authentication is invalid. Please re-authenticate."))
+ }
+
+ val installPath = getGameInstallPath(context, libraryItem.appId, libraryItem.name)
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ Timber.i("Starting GOG game installation: ${libraryItem.name} to $installPath")
+
+ // Use the new download method that returns DownloadInfo
+ val result = runBlocking { GOGService.downloadGame(libraryItem.appId, installPath, authConfigPath) }
+
+ if (result.isSuccess) {
+ val downloadInfo = result.getOrNull()
+ if (downloadInfo != null) {
+ // Add download in progress marker and remove completion marker
+ val appDirPath = getAppDirPath(libraryItem.appId)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ MarkerUtils.addMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+
+ // Add a progress listener to update markers when download completes
+ downloadInfo.addProgressListener { progress ->
+ when {
+ progress >= 1.0f -> {
+ // Download completed successfully
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+ MarkerUtils.addMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ Timber.i("GOG game installation completed: ${libraryItem.name}")
+ }
+ progress < 0.0f -> {
+ // Download failed or cancelled
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ Timber.i("GOG game installation failed/cancelled: ${libraryItem.name}")
+ }
+ }
+ }
+
+ Timber.i("GOG game installation started successfully: ${libraryItem.name}")
+ }
+ return Result.success(downloadInfo)
+ } else {
+ val error = result.exceptionOrNull() ?: Exception("Unknown download error")
+ Timber.e(error, "Failed to install GOG game: ${libraryItem.name}")
+ return Result.failure(error)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to install GOG game: ${libraryItem.name}")
+ return Result.failure(e)
+ }
+ }
+
+ override fun deleteGame(context: Context, libraryItem: LibraryItem): Result<Unit> {
+ try {
+ val gameId = libraryItem.gameId.toString()
+ val installPath = getGameInstallPath(context, gameId, libraryItem.name)
+ val installDir = File(installPath)
+
+ // Delete the manifest file to ensure fresh downloads on reinstall
+ val manifestPath = File(context.filesDir, "manifests/$gameId")
+ if (manifestPath.exists()) {
+ val manifestDeleted = manifestPath.delete()
+ if (manifestDeleted) {
+ Timber.i("Deleted manifest file for game $gameId")
+ } else {
+ Timber.w("Failed to delete manifest file for game $gameId")
+ }
+ }
+
+ if (installDir.exists()) {
+ val success = installDir.deleteRecursively()
+ if (success) {
+ // Remove all markers
+ val appDirPath = getAppDirPath(libraryItem.appId)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+
+ // Cancel and clean up any active download
+ GOGService.cancelDownload(libraryItem.appId)
+ GOGService.cleanupDownload(libraryItem.appId)
+
+ // Update database to mark as not installed
+ val game = runBlocking { getGameById(gameId) }
+ if (game != null) {
+ val updatedGame = game.copy(
+ isInstalled = false,
+ installPath = "",
+ )
+ runBlocking { gogGameDao.update(updatedGame) }
+ }
+
+ Timber.i("GOG game ${libraryItem.name} deleted successfully")
+ return Result.success(Unit)
+ } else {
+ return Result.failure(Exception("Failed to delete GOG game directory"))
+ }
+ } else {
+ Timber.w("GOG game directory doesn't exist: $installPath")
+ // Remove all markers even if directory doesn't exist
+ val appDirPath = getAppDirPath(libraryItem.appId)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+
+ // Cancel and clean up any active download
+ GOGService.cancelDownload(libraryItem.appId)
+ GOGService.cleanupDownload(libraryItem.appId)
+
+ // Update database anyway to ensure consistency
+ val game = runBlocking { getGameById(gameId) }
+ if (game != null) {
+ val updatedGame = game.copy(
+ isInstalled = false,
+ installPath = "",
+ )
+ runBlocking { gogGameDao.update(updatedGame) }
+ }
+
+ return Result.success(Unit) // Consider it already deleted
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to delete GOG game ${libraryItem.gameId}")
+ return Result.failure(e)
+ }
+ }
+
+ override fun isGameInstalled(context: Context, libraryItem: LibraryItem): Boolean {
+ try {
+ val appDirPath = getAppDirPath(libraryItem.appId)
+
+ // Use marker-based approach for reliable state tracking
+ val isDownloadComplete = MarkerUtils.hasMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+ val isDownloadInProgress = MarkerUtils.hasMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+
+ // Game is installed only if download is complete and not in progress
+ val isInstalled = isDownloadComplete && !isDownloadInProgress
+
+ // Update database if the install status has changed
+ val gameId = libraryItem.gameId.toString()
+ val game = runBlocking { getGameById(gameId) }
+ if (game != null && isInstalled != game.isInstalled) {
+ val installPath = if (isInstalled) getGameInstallPath(context, gameId, libraryItem.name) else ""
+ val updatedGame = game.copy(
+ isInstalled = isInstalled,
+ installPath = installPath,
+ )
+ runBlocking { gogGameDao.update(updatedGame) }
+ }
+
+ return isInstalled
+ } catch (e: Exception) {
+ Timber.e(e, "Error checking if GOG game is installed")
+ return false
+ }
+ }
+
+ override suspend fun isUpdatePending(libraryItem: LibraryItem): Boolean {
+ return false // Not implemented yet.
+ }
+
+ override fun getDownloadInfo(libraryItem: LibraryItem): DownloadInfo? {
+ return GOGService.getDownloadInfo(libraryItem.appId)
+ }
+
+ override fun hasPartialDownload(libraryItem: LibraryItem): Boolean {
+ try {
+ val appDirPath = getAppDirPath(libraryItem.appId)
+
+ // Use marker-based approach for reliable state tracking
+ val isDownloadInProgress = MarkerUtils.hasMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+ val isDownloadComplete = MarkerUtils.hasMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+
+ // Has partial download if download is in progress or if there are files but no completion marker
+ if (isDownloadInProgress) {
+ return true
+ }
+
+ // Also check if there are files in the directory but no completion marker (interrupted download)
+ if (!isDownloadComplete) {
+ val gameId = libraryItem.gameId.toString()
+ val gameName = libraryItem.name
+ // Use GOGConstants directly since we don't have context here and it's not needed
+ val installPath = GOGConstants.getGameInstallPath(gameName)
+ val installDir = File(installPath)
+
+ // If directory has files but no completion marker, it's a partial download
+ return installDir.exists() && installDir.listFiles()?.isNotEmpty() == true
+ }
+
+ return false
+ } catch (e: Exception) {
+ Timber.w(e, "Error checking partial download status for ${libraryItem.name}")
+ return false
+ }
+ }
+
+ override suspend fun getGameDiskSize(context: Context, libraryItem: LibraryItem): String = withContext(Dispatchers.IO) {
+ // Calculate size from install directory
+ val installPath = getGameInstallPath(context, libraryItem.appId, libraryItem.name)
+ val folderSize = StorageUtils.getFolderSize(installPath)
+
+ StorageUtils.formatBinarySize(folderSize)
+ }
+
+ override fun getAppDirPath(appId: String): String {
+ // Extract the numeric game ID from the appId
+ val gameId = ContainerUtils.extractGameIdFromContainerId(appId)
+
+ // Get the game details to find the correct title
+ val game = runBlocking { getGameById(gameId.toString()) }
+ if (game != null) {
+ // Return the specific game installation path
+ val gamePath = GOGConstants.getGameInstallPath(game.title)
+ Timber.d("GOG getAppDirPath for appId $appId (game: ${game.title}) -> $gamePath")
+ return gamePath
+ }
+
+ // Fallback to base path if game not found (shouldn't happen normally)
+ Timber.w("Could not find game for appId $appId, using base path")
+ return GOGConstants.GOG_GAMES_BASE_PATH
+ }
+
+ override suspend fun launchGameWithSaveSync(
+ context: Context,
+ libraryItem: LibraryItem,
+ parentScope: CoroutineScope,
+ ignorePendingOperations: Boolean,
+ preferredSave: Int?,
+ ): PostSyncInfo = withContext(Dispatchers.IO) {
+ try {
+ Timber.i("Starting GOG game launch with save sync for ${libraryItem.name}")
+
+ // Check if GOG credentials exist
+ if (!GOGService.hasStoredCredentials(context)) {
+ Timber.w("No GOG credentials found, skipping cloud save sync")
+ return@withContext PostSyncInfo(SyncResult.Success) // Continue without sync
+ }
+
+ // Determine save path for GOG game
+ val savePath = "${getGameInstallPath(context, libraryItem.appId, libraryItem.name)}/saves"
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ Timber.i("Starting GOG cloud save sync for game ${libraryItem.gameId}")
+
+ // Perform GOG cloud save sync
+ val syncResult = GOGService.syncCloudSaves(
+ gameId = libraryItem.gameId.toString(),
+ savePath = savePath,
+ authConfigPath = authConfigPath,
+ timestamp = 0.0f,
+ )
+
+ if (syncResult.isSuccess) {
+ Timber.i("GOG cloud save sync completed successfully")
+ PostSyncInfo(SyncResult.Success)
+ } else {
+ val error = syncResult.exceptionOrNull()
+ Timber.e(error, "GOG cloud save sync failed")
+ PostSyncInfo(SyncResult.UnknownFail)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "GOG cloud save sync exception for game ${libraryItem.gameId}")
+ PostSyncInfo(SyncResult.UnknownFail)
+ }
+ }
+
+ override fun getStoreUrl(libraryItem: LibraryItem): Uri {
+ val gogGame = runBlocking { getGameById(libraryItem.gameId.toString()) }
+ val slug = gogGame?.slug ?: ""
+ return "https://www.gog.com/en/game/$slug".toUri()
+ }
+
+ override fun getWineStartCommand(
+ context: Context,
+ libraryItem: LibraryItem,
+ container: Container,
+ bootToContainer: Boolean,
+ appLaunchInfo: LaunchInfo?,
+ envVars: EnvVars,
+ guestProgramLauncherComponent: GuestProgramLauncherComponent,
+ ): String {
+ // For GOG games, we always want to launch the actual game
+ // because GOG doesn't have appLaunchInfo like Steam does
+
+ // Extract the numeric game ID from appId using the existing utility function
+ val gameId = ContainerUtils.extractGameIdFromContainerId(libraryItem.appId)
+
+ // Get the game details to find the correct title
+ val game = runBlocking { getGameById(gameId.toString()) }
+ if (game == null) {
+ Timber.e("Game not found for ID: $gameId")
+ return "\"explorer.exe\""
+ }
+
+ Timber.i("Looking for GOG game '${game.title}' with ID: $gameId")
+
+ // Get the specific game installation directory using the existing function
+ val gameInstallPath = getGameInstallPath(context, gameId.toString(), game.title)
+ val gameDir = File(gameInstallPath)
+
+ if (!gameDir.exists()) {
+ Timber.e("Game installation directory does not exist: $gameInstallPath")
+ return "\"explorer.exe\""
+ }
+
+ Timber.i("Found game directory: ${gameDir.absolutePath}")
+
+ // Use GOGGameManager to get the correct executable
+ val executablePath = runBlocking { getInstalledExe(context, libraryItem) }
+
+ if (executablePath.isEmpty()) {
+ Timber.w("No executable found for GOG game ${libraryItem.name}, opening file manager")
+ return "\"explorer.exe\""
+ }
+
+ // Calculate the Windows path for the game subdirectory
+ val gameSubDirRelativePath = gameDir.relativeTo(File(GOGConstants.GOG_GAMES_BASE_PATH)).path.replace('\\', '/')
+ val windowsGamePath = "E:/gog_games/$gameSubDirRelativePath"
+
+ // Set WINEPATH to the game subdirectory on E: drive
+ envVars.put("WINEPATH", windowsGamePath)
+
+ // Set the working directory to the game directory
+ val gameWorkingDir = File(GOGConstants.GOG_GAMES_BASE_PATH, gameSubDirRelativePath)
+ guestProgramLauncherComponent.workingDir = gameWorkingDir
+ Timber.i("Setting working directory to: ${gameWorkingDir.absolutePath}")
+
+ val executableName = File(executablePath).name
+ Timber.i("GOG game executable name: $executableName")
+ Timber.i("GOG game Windows path: $windowsGamePath")
+ Timber.i("GOG game subdirectory relative path: $gameSubDirRelativePath")
+
+ // Determine structure type by checking if game_* subdirectory exists
+ val isV2Structure = gameDir.listFiles()?.any {
+ it.isDirectory && it.name.startsWith("game_$gameId")
+ } ?: false
+ Timber.i("Game structure type: ${if (isV2Structure) "V2" else "V1"}")
+
+ val fullCommand = "\"$windowsGamePath/$executablePath\""
+
+ Timber.i("Full Wine command will be: $fullCommand")
+ return fullCommand
+ }
+
+ override fun createLibraryItem(appId: String, gameId: String, context: Context): LibraryItem {
+ val gogGame = runBlocking { getGameById(gameId) }
+
+ return LibraryItem(
+ appId = appId,
+ name = gogGame?.title ?: "Unknown GOG Game",
+ iconHash = "", // GOG games don't have icon hashes like Steam
+ gameSource = GameSource.GOG,
+ )
+ }
+
+ // Simple cache for download sizes
+ private val downloadSizeCache = mutableMapOf<String, String>()
+ private val loadingSizes = mutableSetOf<String>()
+
+ override suspend fun getDownloadSize(libraryItem: LibraryItem): String {
+ val gameId = libraryItem.gameId.toString()
+
+ // Return cached result if available
+ downloadSizeCache[gameId]?.let { return it }
+
+ // Get size info directly (now properly async)
+ return try {
+ Timber.d("Getting download size for game $gameId")
+ val sizeInfo = GOGService.getGameSizeInfo(gameId)
+ val formattedSize = sizeInfo?.let { StorageUtils.formatBinarySize(it.downloadSize) } ?: "Unknown"
+
+ // Cache the result
+ downloadSizeCache[gameId] = formattedSize
+ Timber.d("Got download size for game $gameId: $formattedSize")
+
+ formattedSize
+ } catch (e: Exception) {
+ Timber.w(e, "Failed to get download size for game $gameId")
+ val errorResult = "Unknown"
+ downloadSizeCache[gameId] = errorResult
+ errorResult
+ }
+ }
+
+ /**
+ * Get cached download size if available
+ */
+ fun getCachedDownloadSize(gameId: String): String? {
+ return downloadSizeCache[gameId]
+ }
+
+ override fun isValidToDownload(library: LibraryItem): Boolean {
+ return true // GOG games are always downloadable if owned
+ }
+
+ override fun getAppInfo(libraryItem: LibraryItem): SteamApp? {
+ val gogGame = runBlocking { getGameById(libraryItem.gameId.toString()) }
+ return if (gogGame != null) {
+ convertGOGGameToSteamApp(gogGame)
+ } else {
+ null
+ }
+ }
+
+ override fun getReleaseDate(libraryItem: LibraryItem): String {
+ val appInfo = getAppInfo(libraryItem)
+ if (appInfo?.releaseDate == null || appInfo.releaseDate == 0L) {
+ return "Unknown"
+ }
+ val date = Date(appInfo.releaseDate)
+ return SimpleDateFormat("MMM dd, yyyy", Locale.getDefault()).format(date)
+ }
+
+ override fun getHeroImage(libraryItem: LibraryItem): String {
+ val gogGame = runBlocking { getGameById(libraryItem.gameId.toString()) }
+ val imageUrl = gogGame?.imageUrl ?: ""
+
+ // Fix GOG URLs that are missing the protocol
+ return if (imageUrl.startsWith("//")) {
+ "https:$imageUrl"
+ } else {
+ imageUrl
+ }
+ }
+
+ override fun getIconImage(libraryItem: LibraryItem): String {
+ return libraryItem.iconHash
+ }
+
+ override fun getInstallInfoDialog(context: Context, libraryItem: LibraryItem): MessageDialogState {
+ // GOG install logic
+ val gogInstallPath = "${context.dataDir.path}/gog_games"
+ val availableBytes = StorageUtils.getAvailableSpace(context.dataDir.path)
+ val availableSpace = StorageUtils.formatBinarySize(availableBytes)
+
+ // Get cached download size if available, otherwise show "Calculating..."
+ val gameId = libraryItem.gameId.toString()
+ val downloadSize = getCachedDownloadSize(gameId) ?: "Calculating..."
+
+ return MessageDialogState(
+ visible = true,
+ type = DialogType.INSTALL_APP,
+ title = context.getString(R.string.download_prompt_title),
+ message = "Install ${libraryItem.name} from GOG?" +
+ "\n\nDownload Size: $downloadSize" +
+ "\nInstall Path: $gogInstallPath/${libraryItem.name}" +
+ "\nAvailable Space: $availableSpace",
+ confirmBtnText = context.getString(R.string.proceed),
+ dismissBtnText = context.getString(R.string.cancel),
+ )
+ }
+
+ override fun runBeforeLaunch(context: Context, libraryItem: LibraryItem) {
+ // Don't run anything before launch for GOG games
+ }
+
+ override fun getAllGames(): Flow<List<Game>> {
+ return gogGameDao.getAll().map { gogGames ->
+ gogGames.map { gogGame -> GOGGameWrapper(gogGame) }
+ }
+ }
+
+ /**
+ * Get install path for a specific GOG game
+ */
+ fun getGameInstallPath(context: Context, gameId: String, gameTitle: String): String {
+ return GOGConstants.getGameInstallPath(gameTitle)
+ }
+
+ /**
+ * Get GOG game by ID from database
+ */
+ suspend fun getGameById(gameId: String): GOGGame? = withContext(Dispatchers.IO) {
+ try {
+ gogGameDao.getById(gameId)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to get GOG game by ID: $gameId")
+ null
+ }
+ }
+
+ /**
+ * Get the executable path for an installed GOG game.
+ * Handles both V1 and V2 game directory structures.
+ */
+ suspend fun getInstalledExe(context: Context, libraryItem: LibraryItem): String = withContext(Dispatchers.IO) {
+ val gameId = libraryItem.gameId
+ try {
+ val game = runBlocking { getGameById(gameId.toString()) } ?: return@withContext ""
+ val installPath = getGameInstallPath(context, game.id, game.title)
+
+ // Try V2 structure first (game_$gameId subdirectory)
+ val v2GameDir = File(installPath, "game_$gameId")
+ if (v2GameDir.exists()) {
+ Timber.i("Found V2 game structure: ${v2GameDir.absolutePath}")
+ return@withContext getGameExecutable(installPath, v2GameDir)
+ } else {
+ // Try V1 structure (look for any subdirectory in the install path)
+ val installDirFile = File(installPath)
+ val subdirs = installDirFile.listFiles()?.filter {
+ it.isDirectory && it.name != "saves"
+ } ?: emptyList()
+
+ if (subdirs.isNotEmpty()) {
+ // For V1 games, find the subdirectory with .exe files
+ val v1GameDir = subdirs.find { subdir ->
+ val exeFiles = subdir.listFiles()?.filter {
+ it.isFile &&
+ it.name.endsWith(".exe", ignoreCase = true) &&
+ !isGOGUtilityExecutable(it.name)
+ } ?: emptyList()
+ exeFiles.isNotEmpty()
+ }
+
+ if (v1GameDir != null) {
+ Timber.i("Found V1 game structure: ${v1GameDir.absolutePath}")
+ return@withContext getGameExecutable(installPath, v1GameDir)
+ } else {
+ Timber.w("No V1 game subdirectories with executables found in: $installPath")
+ return@withContext ""
+ }
+ } else {
+ Timber.w("No game directories found in: $installPath")
+ return@withContext ""
+ }
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to get executable for GOG game $gameId")
+ ""
+ }
+ }
+
+ /**
+ * Check if an executable is a GOG utility (should be skipped)
+ */
+ private fun isGOGUtilityExecutable(filename: String): Boolean {
+ return filename.equals("unins000.exe", ignoreCase = true) ||
+ filename.equals("CheckApplication.exe", ignoreCase = true) ||
+ filename.equals("SettingsApplication.exe", ignoreCase = true)
+ }
+
+ private fun getGameExecutable(installPath: String, gameDir: File): String {
+ // Get the main executable from GOG game info file
+ val mainExe = getMainExecutableFromGOGInfo(gameDir, installPath)
+
+ if (mainExe.isNotEmpty()) {
+ Timber.i("Found GOG game executable from info file: $mainExe")
+ return mainExe
+ }
+
+ Timber.e("Failed to find executable from GOG info file in: ${gameDir.absolutePath}")
+ return ""
+ }
+
+ private fun getMainExecutableFromGOGInfo(gameDir: File, installPath: String): String {
+ // Look for goggame-*.info file
+ val infoFile = gameDir.listFiles()?.find {
+ it.isFile && it.name.startsWith("goggame-") && it.name.endsWith(".info")
+ }
+
+ if (infoFile == null) {
+ throw Exception("GOG info file not found in: ${gameDir.absolutePath}")
+ }
+
+ val content = infoFile.readText()
+ Timber.d("GOG info file content: $content")
+
+ // Parse JSON to find the primary task
+ val jsonObject = org.json.JSONObject(content)
+
+ // Look for playTasks array
+ if (!jsonObject.has("playTasks")) {
+ throw Exception("GOG info file does not contain playTasks array")
+ }
+
+ val playTasks = jsonObject.getJSONArray("playTasks")
+
+ // Find the primary task
+ for (i in 0 until playTasks.length()) {
+ val task = playTasks.getJSONObject(i)
+ if (task.has("isPrimary") && task.getBoolean("isPrimary")) {
+ val executablePath = task.getString("path")
+
+ Timber.i("Found primary task executable path: $executablePath")
+
+ // Check if the executable actually exists (case-insensitive)
+ val actualExeFile = gameDir.listFiles()?.find {
+ it.name.equals(executablePath, ignoreCase = true)
+ }
+ if (actualExeFile != null && actualExeFile.exists()) {
+ return "${gameDir.name}/${actualExeFile.name}"
+ } else {
+ Timber.w("Primary task executable '$executablePath' not found in game directory")
+ }
+ break
+ }
+ }
+
+ return ""
+ }
+
+
+ /**
+ * Convert GOGGame to SteamApp format for compatibility with existing UI components.
+ * This allows GOG games to be displayed using the same UI components as Steam games.
+ */
+ private fun convertGOGGameToSteamApp(gogGame: GOGGame): SteamApp {
+ // Convert release date string (ISO format like "2021-06-17T15:55:+0300") to timestamp
+ val releaseTimestamp = try {
+ if (gogGame.releaseDate.isNotEmpty()) {
+ // Try different date formats that GOG might use
+ val formats = arrayOf(
+ SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ZZZZZ", Locale.US), // 2021-06-17T15:55:+0300
+ SimpleDateFormat("yyyy-MM-dd'T'HH:mmZ", Locale.US), // 2021-06-17T15:55+0300
+ SimpleDateFormat("yyyy-MM-dd", Locale.US), // 2021-06-17
+ SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US), // 2021-06-17T15:55:30
+ )
+
+ var parsedDate: Date? = null
+ for (format in formats) {
+ try {
+ parsedDate = format.parse(gogGame.releaseDate)
+ break
+ } catch (e: Exception) {
+ // Try next format
+ }
+ }
+
+ parsedDate?.time ?: 0L
+ } else {
+ 0L
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Failed to parse release date: ${gogGame.releaseDate}")
+ 0L
+ }
+
+ // Convert GOG game ID (string) to integer for SteamApp compatibility
+ val appId = try {
+ gogGame.id.toIntOrNull() ?: gogGame.id.hashCode()
+ } catch (e: Exception) {
+ gogGame.id.hashCode()
+ }
+
+ return SteamApp(
+ id = appId,
+ name = gogGame.title,
+ type = AppType.game,
+ osList = EnumSet.of(OS.windows),
+ releaseState = ReleaseState.released,
+ releaseDate = releaseTimestamp,
+ developer = gogGame.developer.takeIf { it.isNotEmpty() } ?: "Unknown Developer",
+ publisher = gogGame.publisher.takeIf { it.isNotEmpty() } ?: "Unknown Publisher",
+ controllerSupport = ControllerSupport.none,
+ logoHash = "",
+ iconHash = "",
+ clientIconHash = "",
+ installDir = gogGame.title.replace(Regex("[^a-zA-Z0-9 ]"), "").trim(),
+ )
+ }
+
+ private suspend fun ensureValidCredentials(context: Context): Boolean {
+ val validationResult = GOGService.validateCredentials(context)
+ return validationResult.isSuccess && validationResult.getOrDefault(false)
+ }
+
+}
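
The executable lookup above is driven by the goggame-*.info manifest that GOG installers place next to the game files. Purely for illustration, this is the minimal shape getMainExecutableFromGOGInfo() expects; the field names come from the parsing code, while the concrete values are invented.

    import org.json.JSONObject

    val sampleInfo = """
        {
          "gameId": "1234567890",
          "playTasks": [
            { "isPrimary": true,  "type": "FileTask", "path": "ExampleGame.exe" },
            { "isPrimary": false, "type": "FileTask", "path": "SettingsApplication.exe" }
          ]
        }
    """.trimIndent()

    val primaryExe = JSONObject(sampleInfo)
        .getJSONArray("playTasks")
        .let { tasks ->
            (0 until tasks.length())
                .map { tasks.getJSONObject(it) }
                .firstOrNull { it.optBoolean("isPrimary") }
                ?.getString("path")
        }
    // primaryExe == "ExampleGame.exe"
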
diff --git a/app/src/main/java/app/gamenative/service/GOG/GOGGameWrapper.kt b/app/src/main/java/app/gamenative/service/GOG/GOGGameWrapper.kt
new file mode 100644
index 000000000..1d9dfc037
--- /dev/null
+++ b/app/src/main/java/app/gamenative/service/GOG/GOGGameWrapper.kt
@@ -0,0 +1,27 @@
+package app.gamenative.data
+
+import app.gamenative.enums.AppType
+
+/**
+ * GOG game implementation
+ */
+data class GOGGameWrapper(
+ private val gogGame: GOGGame,
+) : Game {
+ override val id: String get() = gogGame.id
+ override val name: String get() = gogGame.title
+ override val source: GameSource get() = GameSource.GOG
+ override val isInstalled: Boolean get() = gogGame.isInstalled
+ override val isShared: Boolean get() = false
+ override val iconUrl: String get() = gogGame.iconUrl
+ override val appType: AppType get() = AppType.game
+
+ override fun toLibraryItem(index: Int): LibraryItem = LibraryItem(
+ index = index,
+ appId = "GOG_${gogGame.id}",
+ name = gogGame.title,
+ iconHash = iconUrl,
+ isShared = false,
+ gameSource = GameSource.GOG,
+ )
+}
diff --git a/app/src/main/java/app/gamenative/service/GOG/GOGLibraryManager.kt b/app/src/main/java/app/gamenative/service/GOG/GOGLibraryManager.kt
new file mode 100644
index 000000000..4da90d56e
--- /dev/null
+++ b/app/src/main/java/app/gamenative/service/GOG/GOGLibraryManager.kt
@@ -0,0 +1,158 @@
+package app.gamenative.service.GOG
+
+import android.content.Context
+import app.gamenative.db.dao.GOGGameDao
+import javax.inject.Inject
+import kotlinx.coroutines.*
+import timber.log.Timber
+
+class GOGLibraryManager @Inject constructor(
+ private val gogGameDao: GOGGameDao,
+) {
+
+ private val scope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
+
+ // Track if background sync is already running
+ private var backgroundSyncInProgress = false
+
+ /**
+ * Start background library sync that progressively syncs games in batches
+ * Returns a Result indicating whether the sync was started successfully
+ */
+ suspend fun startBackgroundSync(context: Context, clearExisting: Boolean = false): Result<Unit> {
+ if (backgroundSyncInProgress) {
+ Timber.i("Background GOG sync already in progress, skipping")
+ return Result.failure(Exception("Background sync already in progress"))
+ }
+
+ // Validate credentials before starting background sync
+ return try {
+ if (!GOGService.hasStoredCredentials(context)) {
+ Timber.w("No GOG credentials found, cannot start background sync")
+ return Result.failure(Exception("No GOG credentials found. Please log in first."))
+ }
+
+ val validationResult = GOGService.validateCredentials(context)
+ if (validationResult.isFailure || !validationResult.getOrThrow()) {
+ Timber.w("GOG credentials validation failed, cannot start background sync")
+ return Result.failure(Exception("GOG credentials validation failed. Please log in again."))
+ }
+
+ scope.launch {
+ backgroundSyncInProgress = true
+ syncLibraryInBackground(context, clearExisting)
+ backgroundSyncInProgress = false
+ }
+
+ Result.success(Unit)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to start background sync")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Clear all GOG games from the database
+ */
+ suspend fun clearLibrary(): Result<Unit> = withContext(Dispatchers.IO) {
+ try {
+ Timber.i("Clearing GOG library from database")
+ gogGameDao.deleteAll()
+ Timber.i("GOG library cleared successfully")
+ Result.success(Unit)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to clear GOG library")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Background sync implementation with true progressive syncing
+ * Games appear in the library as soon as they're fetched from GOG API
+ */
+ private suspend fun syncLibraryInBackground(context: Context, clearExisting: Boolean = false) {
+ try {
+ Timber.i("Starting progressive background GOG library sync...")
+
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ // Clear existing games if requested
+ if (clearExisting) {
+ Timber.i("Clearing existing GOG games before sync")
+ clearLibrary()
+ }
+
+ // Try progressive sync first (if available), fallback to batch sync
+ syncLibraryProgressively(context, authConfigPath)
+ } catch (e: Exception) {
+ Timber.e(e, "Exception during background GOG sync")
+ }
+ }
+
+ /**
+ * Progressive sync method
+ * Insert games one by one as they are fetched
+ */
+ private suspend fun syncLibraryProgressively(context: Context, authConfigPath: String): Result<Unit> {
+ return try {
+ Timber.i("Starting progressive GOG library sync...")
+
+ // Validate credentials before making GOGDL calls
+ val validationResult = GOGService.validateCredentials(context)
+ if (validationResult.isFailure || !validationResult.getOrThrow()) {
+ Timber.w("GOG credentials validation failed, aborting progressive sync")
+ return Result.failure(Exception("GOG credentials validation failed"))
+ }
+
+ // Use the new progressive method that inserts games one by one
+ val libraryResult = GOGService.getUserLibraryProgressively(
+ context,
+ onGameFetched = { game ->
+ // Insert each game immediately as it's fetched
+ // All database operations are already in the same coroutine context
+ try {
+ val existingGame = gogGameDao.getById(game.id)
+ val gameToInsert = if (existingGame != null) {
+ game.copy(isInstalled = existingGame.isInstalled, installPath = existingGame.installPath)
+ } else {
+ game
+ }
+ gogGameDao.insert(gameToInsert)
+
+ Timber.d("Inserted game: ${game.title}")
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to insert game: ${game.title}")
+ }
+ },
+ onTotalCount = { totalCount ->
+ Timber.d("Total games to sync: $totalCount")
+ },
+ )
+
+ if (libraryResult.isSuccess) {
+ val totalGames = libraryResult.getOrThrow()
+ Timber.i("Progressive GOG library sync completed successfully: $totalGames games")
+ Result.success(Unit)
+ } else {
+ val error = libraryResult.exceptionOrNull()
+ Timber.e("Failed to get library from GOG API: ${error?.message}")
+ Result.failure(error ?: Exception("Failed to get library"))
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Exception during progressive sync")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Get the count of games in the local database
+ */
+ suspend fun getLocalGameCount(): Int = withContext(Dispatchers.IO) {
+ try {
+ gogGameDao.getAllAsList().size
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to get local GOG game count")
+ 0
+ }
+ }
+}
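
A hypothetical call site for the manager above, e.g. right after a successful GOG login; the surrounding function is not part of this patch.

    import android.content.Context
    import kotlinx.coroutines.CoroutineScope
    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.launch
    import timber.log.Timber

    fun refreshGogLibrary(scope: CoroutineScope, context: Context, manager: GOGLibraryManager) {
        scope.launch(Dispatchers.IO) {
            manager.startBackgroundSync(context, clearExisting = false)
                .onFailure { Timber.w(it, "GOG library sync not started") }
        }
    }
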
diff --git a/app/src/main/java/app/gamenative/service/GOG/GOGService.kt b/app/src/main/java/app/gamenative/service/GOG/GOGService.kt
new file mode 100644
index 000000000..f4788e531
--- /dev/null
+++ b/app/src/main/java/app/gamenative/service/GOG/GOGService.kt
@@ -0,0 +1,1663 @@
+package app.gamenative.service.GOG
+
+import android.app.Service
+import android.content.Context
+import android.content.Intent
+import android.os.IBinder
+import app.gamenative.data.DownloadInfo
+import app.gamenative.data.GOGCredentials
+import app.gamenative.data.GOGGame
+import app.gamenative.service.NotificationHelper
+import app.gamenative.utils.ContainerUtils
+import com.chaquo.python.Kwarg
+import com.chaquo.python.PyObject
+import com.chaquo.python.Python
+import com.chaquo.python.android.AndroidPlatform
+import java.io.File
+import java.util.concurrent.ConcurrentHashMap
+import javax.inject.Inject
+import javax.inject.Singleton
+import kotlinx.coroutines.*
+import okhttp3.OkHttpClient
+import org.json.JSONObject
+import timber.log.Timber
+
+/**
+ * Data class to hold metadata extracted from GOG GamesDB
+ */
+private data class GameMetadata(
+ val developer: String = "Unknown Developer",
+ val publisher: String = "Unknown Publisher",
+ val title: String? = null,
+ val description: String? = null
+)
+
+/**
+ * Data class to hold size information from gogdl info command
+ */
+data class GameSizeInfo(
+ val downloadSize: Long,
+ val diskSize: Long
+)
+
+@Singleton
+class GOGService @Inject constructor() : Service() {
+
+ companion object {
+ private var instance: GOGService? = null
+ private var appContext: Context? = null
+ private var isInitialized = false
+ private var httpClient: OkHttpClient? = null
+ private var python: Python? = null
+
+ // Constants
+ private const val GOG_CLIENT_ID = "46899977096215655"
+
+ // Add sync tracking variables
+ private var syncInProgress: Boolean = false
+ private var backgroundSyncJob: Job? = null
+
+ val isRunning: Boolean
+ get() = instance != null
+
+ fun start(context: Context) {
+ if (!isRunning) {
+ val intent = Intent(context, GOGService::class.java)
+ context.startForegroundService(intent)
+ }
+ }
+
+ fun stop() {
+ instance?.let { service ->
+ service.stopSelf()
+ }
+ }
+
+ fun setHttpClient(client: OkHttpClient) {
+ httpClient = client
+ }
+
+ /**
+ * Initialize the GOG service with Chaquopy Python
+ */
+ fun initialize(context: Context): Boolean {
+ if (isInitialized) return true
+
+ try {
+ // Store the application context
+ appContext = context.applicationContext
+
+ Timber.i("Initializing GOG service with Chaquopy...")
+
+ // Initialize Python if not already started
+ if (!Python.isStarted()) {
+ Python.start(AndroidPlatform(context))
+ }
+ python = Python.getInstance()
+
+ isInitialized = true
+ Timber.i("GOG service initialized successfully with Chaquopy")
+
+ return isInitialized
+ } catch (e: Exception) {
+ Timber.e(e, "Exception during GOG service initialization")
+ return false
+ }
+ }
+
+ /**
+ * Execute GOGDL command using Chaquopy
+ */
+ suspend fun executeCommand(vararg args: String): Result<String> {
+ return withContext(Dispatchers.IO) {
+ try {
+ val python = Python.getInstance()
+ val sys = python.getModule("sys")
+ val io = python.getModule("io")
+ val originalArgv = sys.get("argv")
+
+ try {
+ // Now import our Android-compatible GOGDL CLI module
+ val gogdlCli = python.getModule("gogdl.cli")
+
+ // Set up arguments for argparse
+ val argsList = listOf("gogdl") + args.toList()
+ Timber.d("Setting GOGDL arguments for argparse: ${args.joinToString(" ")}")
+ // Convert to Python list to avoid jarray issues
+ val pythonList = python.builtins.callAttr("list", argsList.toTypedArray())
+ sys.put("argv", pythonList)
+ Timber.d("sys.argv set to: $argsList")
+
+ // Capture stdout
+ val stdoutCapture = io.callAttr("StringIO")
+ val originalStdout = sys.get("stdout")
+ sys.put("stdout", stdoutCapture)
+
+ // Execute the main function
+ gogdlCli.callAttr("main")
+
+ // Get the captured output
+ val output = stdoutCapture.callAttr("getvalue").toString()
+ Timber.d("GOGDL output: $output")
+
+ // Restore original stdout
+ sys.put("stdout", originalStdout)
+
+ if (output.isNotEmpty()) {
+ Result.success(output)
+ } else {
+ Result.success("GOGDL execution completed")
+ }
+ } catch (e: Exception) {
+ Timber.d("GOGDL execution completed with exception: ${e.javaClass.simpleName} - ${e.message}")
+ Result.failure(Exception("GOGDL execution failed: $e"))
+ } finally {
+ // Restore original sys.argv
+ sys.put("argv", originalArgv)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to execute GOGDL command: ${args.joinToString(" ")}")
+ Result.failure(Exception("GOGDL execution failed: $e"))
+ }
+ }
+ }
+
+ /**
+ * Read and parse auth credentials from file
+ */
+ private fun readAuthCredentials(authConfigPath: String): Result<Pair<String, String>> {
+ return try {
+ val authFile = File(authConfigPath)
+ Timber.d("Checking auth file at: ${authFile.absolutePath}")
+ Timber.d("Auth file exists: ${authFile.exists()}")
+
+ if (!authFile.exists()) {
+ return Result.failure(Exception("No authentication found. Please log in first."))
+ }
+
+ val authContent = authFile.readText()
+ Timber.d("Auth file content: $authContent")
+
+ val authJson = JSONObject(authContent)
+
+ // GOGDL stores credentials nested under client ID
+ val credentialsJson = if (authJson.has(GOG_CLIENT_ID)) {
+ authJson.getJSONObject(GOG_CLIENT_ID)
+ } else {
+ // Fallback: try to read from root level
+ authJson
+ }
+
+ val accessToken = credentialsJson.optString("access_token", "")
+ val userId = credentialsJson.optString("user_id", "")
+
+ Timber.d("Parsed access_token: ${if (accessToken.isNotEmpty()) "${accessToken.take(20)}..." else "EMPTY"}")
+ Timber.d("Parsed user_id: $userId")
+
+ if (accessToken.isEmpty() || userId.isEmpty()) {
+ Timber.e("Auth data validation failed - accessToken empty: ${accessToken.isEmpty()}, userId empty: ${userId.isEmpty()}")
+ return Result.failure(Exception("Invalid authentication data. Please log in again."))
+ }
+
+ Result.success(Pair(accessToken, userId))
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to read auth credentials")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Parse full GOGCredentials from auth file
+ */
+ private fun parseFullCredentials(authConfigPath: String): GOGCredentials {
+ return try {
+ val authFile = File(authConfigPath)
+ if (authFile.exists()) {
+ val authContent = authFile.readText()
+ val authJson = JSONObject(authContent)
+
+ // GOGDL stores credentials nested under client ID
+ val credentialsJson = if (authJson.has(GOG_CLIENT_ID)) {
+ authJson.getJSONObject(GOG_CLIENT_ID)
+ } else {
+ // Fallback: try to read from root level
+ authJson
+ }
+
+ GOGCredentials(
+ accessToken = credentialsJson.optString("access_token", ""),
+ refreshToken = credentialsJson.optString("refresh_token", ""),
+ userId = credentialsJson.optString("user_id", ""),
+ username = credentialsJson.optString("username", "GOG User"),
+ )
+ } else {
+ // Return dummy credentials for successful auth
+ GOGCredentials(
+ accessToken = "authenticated_${System.currentTimeMillis()}",
+ refreshToken = "refresh_${System.currentTimeMillis()}",
+ userId = "user_123",
+ username = "GOG User",
+ )
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to parse auth result")
+ // Return dummy credentials as fallback
+ GOGCredentials(
+ accessToken = "fallback_token",
+ refreshToken = "fallback_refresh",
+ userId = "fallback_user",
+ username = "GOG User",
+ )
+ }
+ }
+
+ /**
+ * Create GOGCredentials from JSON output
+ */
+ private fun createCredentialsFromJson(outputJson: JSONObject): GOGCredentials {
+ return GOGCredentials(
+ accessToken = outputJson.optString("access_token", ""),
+ refreshToken = outputJson.optString("refresh_token", ""),
+ userId = outputJson.optString("user_id", ""),
+ username = "GOG User", // We don't have username in the token response
+ )
+ }
+
+ /**
+ * Authenticate with GOG using authorization code
+ */
+ suspend fun authenticateWithCode(authConfigPath: String, authorizationCode: String): Result<GOGCredentials> {
+ return try {
+ Timber.i("Starting GOG authentication with authorization code...")
+
+ // Extract the actual authorization code from URL if needed
+ val actualCode = if (authorizationCode.startsWith("http")) {
+ // Extract code parameter from URL
+ val codeParam = authorizationCode.substringAfter("code=", "")
+ if (codeParam.isEmpty()) {
+ return Result.failure(Exception("Invalid authorization URL: no code parameter found"))
+ }
+ // Remove any additional parameters after the code
+ val cleanCode = codeParam.substringBefore("&")
+ Timber.d("Extracted authorization code from URL: ${cleanCode.take(20)}...")
+ cleanCode
+ } else {
+ authorizationCode
+ }
+
+ // Create auth config directory
+ val authFile = File(authConfigPath)
+ val authDir = authFile.parentFile
+ if (authDir != null && !authDir.exists()) {
+ authDir.mkdirs()
+ Timber.d("Created auth config directory: ${authDir.absolutePath}")
+ }
+
+ // Execute GOGDL auth command with the authorization code
+ Timber.d("Authenticating with auth config path: $authConfigPath, code: ${actualCode.take(10)}...")
+ Timber.d("Full auth command: --auth-config-path $authConfigPath auth --code ${actualCode.take(20)}...")
+
+ val result = executeCommand("--auth-config-path", authConfigPath, "auth", "--code=$actualCode")
+
+ if (result.isSuccess) {
+ val gogdlOutput = result.getOrNull() ?: ""
+ Timber.i("GOGDL command completed, checking authentication result...")
+ Timber.d("GOGDL output for auth: $gogdlOutput")
+
+ // First, check if GOGDL output indicates success
+ try {
+ val outputJson = JSONObject(gogdlOutput.trim())
+
+ // Check if the response indicates an error
+ if (outputJson.has("error") && outputJson.getBoolean("error")) {
+ val errorMsg = outputJson.optString("error_description", "Authentication failed")
+ Timber.e("GOG authentication failed: $errorMsg")
+ return Result.failure(Exception("GOG authentication failed: $errorMsg"))
+ }
+
+ // Check if we have the required fields for successful auth
+ val accessToken = outputJson.optString("access_token", "")
+ val userId = outputJson.optString("user_id", "")
+
+ if (accessToken.isEmpty() || userId.isEmpty()) {
+ Timber.e("GOG authentication incomplete: missing access_token or user_id in output")
+ return Result.failure(Exception("Authentication incomplete: missing required data"))
+ }
+
+ // GOGDL output looks good, now check if auth file was created
+ val authFile = File(authConfigPath)
+ if (authFile.exists()) {
+ // Parse authentication result from file
+ val authData = parseFullCredentials(authConfigPath)
+ Timber.i("GOG authentication successful for user: ${authData.username}")
+ Result.success(authData)
+ } else {
+ Timber.w("GOGDL returned success but no auth file created, using output data")
+ // Create credentials from GOGDL output
+ val credentials = createCredentialsFromJson(outputJson)
+ Result.success(credentials)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to parse GOGDL output")
+ // Fallback: check if auth file exists
+ val authFile = File(authConfigPath)
+ if (authFile.exists()) {
+ try {
+ val authData = parseFullCredentials(authConfigPath)
+ Timber.i("GOG authentication successful (fallback) for user: ${authData.username}")
+ Result.success(authData)
+ } catch (ex: Exception) {
+ Timber.e(ex, "Failed to parse auth file")
+ Result.failure(Exception("Failed to parse authentication result: ${ex.message}"))
+ }
+ } else {
+ Timber.e("GOG authentication failed: no auth file created and failed to parse output")
+ Result.failure(Exception("Authentication failed: no credentials available"))
+ }
+ }
+ } else {
+ val error = result.exceptionOrNull()?.message ?: "Authentication failed"
+ Timber.e("GOG authentication command failed: $error")
+ Result.failure(Exception(error))
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "GOG authentication exception")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Fetch rich metadata from GOG GamesDB API including developer and publisher info
+ */
+ private suspend fun fetchGamesDBMetadata(gameId: String): GameMetadata = withContext(Dispatchers.IO) {
+ try {
+ val python = Python.getInstance()
+ val requests = python.getModule("requests")
+
+ val gamesDbUrl = "https://gamesdb.gog.com/platforms/gog/external_releases/$gameId"
+
+ // Create headers dictionary for GamesDB
+ val gamesDbHeaders = python.builtins.callAttr("dict")
+ gamesDbHeaders.callAttr("__setitem__", "User-Agent", "GOGGalaxyClient/2.0.45.61 (Windows_x86_64)")
+
+ Timber.d("Fetching GOG game metadata from GamesDB for ID: $gameId")
+
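+ // Chaquopy's Kwarg wrapper passes these values as Python keyword arguments (headers=..., timeout=...)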
+ val gamesDbResponse = requests.callAttr(
+ "get", gamesDbUrl,
+ Kwarg("headers", gamesDbHeaders),
+ Kwarg("timeout", 10),
+ )
+
+ val gamesDbStatusCode = gamesDbResponse.get("status_code")?.toInt() ?: 0
+ if (gamesDbStatusCode == 200) {
+ val gamesDbJson = gamesDbResponse.callAttr("json")
+ val gameData = gamesDbJson?.callAttr("get", "game")
+
+ // Extract developer information
+ val developers = extractDevelopers(gameData, gameId)
+
+ // Extract publisher information
+ val publishers = extractPublishers(gameData, gameId)
+
+ // Extract title and description from GamesDB
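+ // GamesDB returns localized maps for these fields; the "*" key holds the default-locale value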
+ val title = gamesDbJson?.callAttr("get", "title")?.callAttr("get", "*")?.toString()
+ val description = gamesDbJson?.callAttr("get", "summary")?.callAttr("get", "*")?.toString()
+
+ return@withContext GameMetadata(
+ developer = if (developers.isNotEmpty()) developers.joinToString(", ") else "Unknown Developer",
+ publisher = if (publishers.isNotEmpty()) publishers.joinToString(", ") else "Unknown Publisher",
+ title = title,
+ description = description
+ )
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Error fetching GamesDB metadata for game $gameId")
+ }
+
+ return@withContext GameMetadata()
+ }
+
+ /**
+ * Extract developer names from GamesDB game data
+ */
+ private fun extractDevelopers(gameData: PyObject?, gameId: String): List<String> {
+ val developers = gameData?.callAttr("get", "developers") ?: return emptyList()
+
+ return try {
+ val developersList = mutableListOf<String>()
+ val length = developers.callAttr("__len__")?.toInt() ?: 0
+ for (i in 0 until length) {
+ val dev = developers.callAttr("__getitem__", i)
+ val devName = dev?.callAttr("get", "name")?.toString()
+ if (!devName.isNullOrEmpty()) {
+ developersList.add(devName)
+ }
+ }
+ developersList
+ } catch (e: Exception) {
+ Timber.w(e, "Error parsing developers for game $gameId")
+ emptyList()
+ }
+ }
+
+ /**
+ * Extract publisher names from GamesDB game data
+ */
+ private fun extractPublishers(gameData: PyObject?, gameId: String): List<String> {
+ val publishers = gameData?.callAttr("get", "publishers") ?: return emptyList()
+
+ return try {
+ val publishersList = mutableListOf<String>()
+ val length = publishers.callAttr("__len__")?.toInt() ?: 0
+ for (i in 0 until length) {
+ val pub = publishers.callAttr("__getitem__", i)
+ val pubName = pub?.callAttr("get", "name")?.toString()
+ if (!pubName.isNullOrEmpty()) {
+ publishersList.add(pubName)
+ }
+ }
+ publishersList
+ } catch (e: Exception) {
+ Timber.w(e, "Error parsing publishers for game $gameId")
+ emptyList()
+ }
+ }
+
+ /**
+ * Fetch detailed information for a specific GOG game
+ */
+ private suspend fun fetchGameDetails(gameId: String, accessToken: String): GOGGame? = withContext(Dispatchers.IO) {
+ try {
+ val python = Python.getInstance()
+ val requests = python.getModule("requests")
+
+ // First get rich metadata from GamesDB
+ val metadata = fetchGamesDBMetadata(gameId)
+
+ // Now fetch basic product info from the standard GOG API
+ val url = "https://api.gog.com/products/$gameId"
+
+ // Create headers dictionary
+ val pyDict = python.builtins.callAttr("dict")
+ pyDict.callAttr("__setitem__", "Authorization", "Bearer $accessToken")
+ pyDict.callAttr("__setitem__", "User-Agent", "GOGGalaxyClient/2.0.45.61 (Windows_x86_64)")
+
+ Timber.d("Fetching GOG game details for ID: $gameId")
+
+ val response = requests.callAttr(
+ "get", url,
+ Kwarg("headers", pyDict),
+ Kwarg("timeout", 10),
+ )
+
+ val statusCode = response.get("status_code")?.toInt() ?: 0
+
+ if (statusCode == 200) {
+ val gameJson = response.callAttr("json")
+
+ // Extract game information, using GamesDB data as fallback
+ val title = gameJson?.callAttr("get", "title")?.toString() ?: metadata.title ?: "Unknown Game"
+ val slug = gameJson?.callAttr("get", "slug")?.toString() ?: gameId
+
+ // Check the game_type field for filtering
+ val gameType = gameJson?.callAttr("get", "game_type")?.toString() ?: ""
+
+ // Filter based on game_type - only keep if it's a proper game
+ if (gameType != "game") {
+ return@withContext null
+ }
+
+ // Get description - prefer GamesDB but fallback to product API
+ val description = metadata.description ?: try {
+ gameJson?.callAttr("get", "description")?.callAttr("get", "full")?.toString()
+ ?: gameJson?.callAttr("get", "description")?.toString()
+ ?: ""
+ } catch (e: Exception) {
+ ""
+ }
+
+ // Get best available image URL - try different types in order of preference
+ val imageUrl = try {
+ val images = gameJson?.callAttr("get", "images")
+ if (images != null) {
+ // Try logo2x (high resolution) first, then logo, then other options
+ val imageTypes = listOf("logo2x", "logo", "icon", "background")
+
+ var foundUrl = ""
+ for (imageType in imageTypes) {
+ val imageData = images.callAttr("get", imageType)?.toString()
+ if (!imageData.isNullOrEmpty()) {
+ // GOG URLs start with // so we need to add https:
+ val fullUrl = if (imageData.startsWith("//")) {
+ "https:$imageData"
+ } else {
+ imageData
+ }
+
+ // Try to upgrade logo images to highest quality background version
+ foundUrl = when {
+ fullUrl.contains("_glx_logo.jpg") -> {
+ val baseUrl = fullUrl.substringBefore("_glx_logo.jpg")
+ "$baseUrl.jpg"
+ }
+ fullUrl.contains("_glx_logo_2x.jpg") -> {
+ val baseUrl = fullUrl.substringBefore("_glx_logo_2x.jpg")
+ "$baseUrl.jpg"
+ }
+ else -> fullUrl
+ }
+
+ Timber.d("Game $gameId - using $imageType image: $fullUrl -> $foundUrl")
+ break // Exit loop once we find a valid URL
+ }
+ }
+ foundUrl
+ } else {
+ ""
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Game $gameId - error extracting image URL")
+ ""
+ }
+
+ // Get icon URL specifically
+ val iconUrl = try {
+ val images = gameJson?.callAttr("get", "images")
+ val iconData = images?.callAttr("get", "icon")?.toString()
+ if (!iconData.isNullOrEmpty()) {
+ val fullIconUrl = if (iconData.startsWith("//")) {
+ "https:$iconData"
+ } else {
+ iconData
+ }
+ Timber.d("Game $gameId - icon URL: $fullIconUrl")
+ fullIconUrl
+ } else {
+ ""
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Game $gameId - error extracting icon URL")
+ ""
+ }
+
+ // Developer and publisher info already extracted from GamesDB above
+
+ // Get release date
+ val releaseDate = try {
+ gameJson?.callAttr("get", "release_date")?.toString() ?: ""
+ } catch (e: Exception) {
+ ""
+ }
+
+ Timber.d("Successfully fetched details for game: $title")
+
+ GOGGame(
+ id = gameId,
+ title = title,
+ slug = slug,
+ description = description,
+ imageUrl = imageUrl,
+ iconUrl = iconUrl,
+ developer = metadata.developer,
+ publisher = metadata.publisher,
+ releaseDate = releaseDate,
+ )
+ } else {
+ Timber.w("Failed to fetch game details for $gameId: HTTP $statusCode")
+ null
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Exception fetching game details for $gameId")
+ null
+ }
+ }
+
+ /**
+ * Download a game via GOGDL, tracking progress by parsing its log output
+ */
+ suspend fun downloadGame(gameId: String, installPath: String, authConfigPath: String): Result<DownloadInfo> {
+ return try {
+ Timber.i("Starting GOGDL download with progress parsing for game $gameId")
+
+ val installDir = File(installPath)
+ if (!installDir.exists()) {
+ installDir.mkdirs()
+ }
+
+ // Create DownloadInfo for progress tracking
+ val downloadInfo = DownloadInfo(jobCount = 1)
+
+ // Track this download in the active downloads map
+ getInstance()?.activeDownloads?.put(gameId, downloadInfo)
+
+ // Start GOGDL download with progress parsing
+ val downloadJob = CoroutineScope(Dispatchers.IO).launch {
+ try {
+ // Create support directory for redistributables (like Heroic does)
+ val supportDir = File(installDir.parentFile, "gog-support")
+ supportDir.mkdirs()
+
+ val result = executeCommandWithProgressParsing(
+ downloadInfo,
+ "--auth-config-path", authConfigPath,
+ "download", ContainerUtils.extractGameIdFromContainerId(gameId).toString(),
+ "--platform", "windows",
+ "--path", installPath,
+ "--support", supportDir.absolutePath,
+ "--skip-dlcs",
+ "--lang", "en-US",
+ "--max-workers", "1",
+ )
+
+ if (result.isSuccess) {
+ // Check if the download was actually cancelled
+ if (downloadInfo.isCancelled()) {
+ downloadInfo.setProgress(-1.0f) // Mark as cancelled
+ Timber.i("GOGDL download was cancelled by user")
+ } else {
+ downloadInfo.setProgress(1.0f) // Mark as complete
+ Timber.i("GOGDL download completed successfully")
+ }
+ } else {
+ downloadInfo.setProgress(-1.0f) // Mark as failed
+ Timber.e("GOGDL download failed: ${result.exceptionOrNull()?.message}")
+ }
+ } catch (e: CancellationException) {
+ Timber.i("GOGDL download cancelled by user")
+ downloadInfo.setProgress(-1.0f) // Mark as cancelled
+ } catch (e: Exception) {
+ Timber.e(e, "GOGDL download failed")
+ downloadInfo.setProgress(-1.0f) // Mark as failed
+ } finally {
+ // Clean up the download from active downloads
+ getInstance()?.activeDownloads?.remove(gameId)
+ Timber.d("Cleaned up download for game: $gameId")
+ }
+ }
+
+ // Store the job in DownloadInfo so it can be cancelled
+ downloadInfo.setDownloadJob(downloadJob)
+
+ Result.success(downloadInfo)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to start GOG game download")
+ Result.failure(e)
+ }
+ }
+
+ private suspend fun executeCommandWithProgressParsing(downloadInfo: DownloadInfo, vararg args: String): Result<String> {
+ return withContext(Dispatchers.IO) {
+ var logMonitorJob: Job? = null
+ try {
+ // Start log monitoring for GOGDL progress (works for both V1 and V2)
+ logMonitorJob = CoroutineScope(Dispatchers.IO).launch {
+ monitorGOGDLProgress(downloadInfo)
+ }
+
+ // Store the progress monitor job in DownloadInfo so it can be cancelled
+ downloadInfo.setProgressMonitorJob(logMonitorJob)
+
+ val python = Python.getInstance()
+ val sys = python.getModule("sys")
+ val originalArgv = sys.get("argv")
+
+ try {
+ val gogdlCli = python.getModule("gogdl.cli")
+
+ // Set up arguments for argparse
+ val argsList = listOf("gogdl") + args.toList()
+ Timber.d("Setting GOGDL arguments for argparse: ${args.joinToString(" ")}")
+ val pythonList = python.builtins.callAttr("list", argsList.toTypedArray())
+ sys.put("argv", pythonList)
+
+ // Check for cancellation before starting
+ ensureActive()
+
+ // Set up cancellation mechanism for Python
+ // Extract game ID from the download command arguments
+ val gameIdFromArgs = args.find { it.matches(Regex("\\d+")) } ?: "unknown"
+ val builtins = python.getModule("builtins")
+
+ // Set a global variable that Python can check
+ builtins.put("GOGDL_CANCEL_${gameIdFromArgs}", false)
+ Timber.i("Set up Python cancellation flag: GOGDL_CANCEL_${gameIdFromArgs}")
+
+ // Execute the main function with periodic cancellation checks
+ val pythonExecutionJob = async(Dispatchers.IO) {
+ gogdlCli.callAttr("main")
+ }
+
+ // Wait for either completion or cancellation
+ while (pythonExecutionJob.isActive) {
+ delay(100) // Check every 100ms
+ ensureActive() // Throw CancellationException if cancelled
+ }
+
+ pythonExecutionJob.await()
+ Timber.d("GOGDL execution completed successfully")
+ Result.success("Download completed")
+ } catch (e: Exception) {
+ Timber.e(e, "GOGDL execution failed: ${e.message}")
+ Result.failure(e)
+ } finally {
+ sys.put("argv", originalArgv)
+ }
+ } catch (e: CancellationException) {
+ Timber.i("GOGDL command cancelled")
+ throw e // Re-throw to propagate cancellation
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to execute GOGDL command: ${args.joinToString(" ")}")
+ Result.failure(e)
+ } finally {
+ logMonitorJob?.cancel()
+ }
+ }
+ }
+
+ /**
+ * Monitor GOGDL progress by parsing log output like Heroic Games Launcher does
+ * Works for both V1 and V2 games using the same progress format
+ */
+ private suspend fun monitorGOGDLProgress(downloadInfo: DownloadInfo) {
+ var process: Process? = null
+ try {
+ // Clear any existing logcat buffer to ensure fresh start
+ try {
+ val clearProcess = ProcessBuilder("logcat", "-c").start()
+ clearProcess.waitFor()
+ Timber.d("Cleared logcat buffer for fresh progress monitoring")
+ } catch (e: Exception) {
+ Timber.w(e, "Failed to clear logcat buffer, continuing anyway")
+ }
+
+ // Add delay to ensure Python process has started and old logs are cleared
+ delay(1000)
+
+ // Use logcat to read python.stderr in real time; "-T 1" tails from the most recent entry
+ // so output left over from earlier runs is skipped
+ val startTime = System.currentTimeMillis()
+ process = ProcessBuilder("logcat", "-s", "python.stderr:W", "-T", "1")
+ .redirectErrorStream(true)
+ .start()
+
+ val reader = process.inputStream.bufferedReader()
+ Timber.d("Progress monitoring logcat process started successfully with timestamp filtering")
+
+ // Track progress state exactly like Heroic does
+ var currentPercent: Float? = null
+ var currentEta: String = ""
+ var currentBytes: String = ""
+ var currentDownSpeed: Float? = null
+ var currentDiskSpeed: Float? = null
+
+ while (downloadInfo.getProgress() < 1.0f && downloadInfo.getProgress() >= 0.0f && !downloadInfo.isCancelled()) {
+ // Check for cancellation before reading each line
+ if (downloadInfo.isCancelled()) {
+ Timber.d("Progress monitoring stopping due to cancellation")
+ break
+ }
+
+ val line = reader.readLine()
+ if (line != null) {
+ // Double-check cancellation after reading line
+ if (downloadInfo.isCancelled()) {
+ Timber.d("Progress monitoring stopping due to cancellation after line read")
+ break
+ }
+ // Parse like Heroic: only update if field is empty/undefined
+
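+ // Expected GOGDL log format (same for V1 and V2): "= Progress: 45.67 12345/67890, Running for: 00:01:23, ETA: 00:02:34"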
+ // parse log for percent (only if not already set)
+ if (currentPercent == null) {
+ val percentMatch = Regex("""Progress: (\d+\.\d+) """).find(line)
+ if (percentMatch != null) {
+ val percent = percentMatch.groupValues[1].toFloatOrNull()
+ if (percent != null && !percent.isNaN()) {
+ currentPercent = percent
+ }
+ }
+ }
+
+ // parse log for eta (only if empty)
+ if (currentEta.isEmpty()) {
+ val etaMatch = Regex("""ETA: (\d\d:\d\d:\d\d)""").find(line)
+ if (etaMatch != null) {
+ currentEta = etaMatch.groupValues[1]
+ }
+ }
+
+ // parse log for game download progress (only if empty)
+ if (currentBytes.isEmpty()) {
+ val bytesMatch = Regex("""Downloaded: (\S+) MiB""").find(line)
+ if (bytesMatch != null) {
+ currentBytes = "${bytesMatch.groupValues[1]}MB"
+ }
+ }
+
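+ // Speed lines look like " + Download - 12.34 MiB/s (raw)" and " + Disk - 34.56 MiB/s (write)" (tab-separated)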
+ // parse log for download speed (only if not set)
+ if (currentDownSpeed == null) {
+ val downSpeedMatch = Regex("""Download\t- (\S+) MiB""").find(line)
+ if (downSpeedMatch != null) {
+ val speed = downSpeedMatch.groupValues[1].toFloatOrNull()
+ if (speed != null && !speed.isNaN()) {
+ currentDownSpeed = speed
+ }
+ }
+ }
+
+ // parse disk write speed (only if not set)
+ if (currentDiskSpeed == null) {
+ val diskSpeedMatch = Regex("""Disk\t- (\S+) MiB""").find(line)
+ if (diskSpeedMatch != null) {
+ val speed = diskSpeedMatch.groupValues[1].toFloatOrNull()
+ if (speed != null && !speed.isNaN()) {
+ currentDiskSpeed = speed
+ }
+ }
+ }
+
+ // only send update if all values are present (exactly like Heroic)
+ if (currentPercent != null && currentEta.isNotEmpty() &&
+ currentBytes.isNotEmpty() && currentDownSpeed != null && currentDiskSpeed != null) {
+
+ // Update progress with the percentage
+ val progress = (currentPercent!! / 100.0f).coerceIn(0.0f, 1.0f)
+ downloadInfo.setProgress(progress)
+
+ // Log exactly like Heroic does
+ Timber.i("Progress for game: ${currentPercent}%/${currentBytes}/${currentEta} Down: ${currentDownSpeed}MB/s / Disk: ${currentDiskSpeed}MB/s")
+
+ // reset (exactly like Heroic does)
+ currentPercent = null
+ currentEta = ""
+ currentBytes = ""
+ currentDownSpeed = null
+ currentDiskSpeed = null
+ }
+ } else {
+ delay(100L) // Brief delay if no new log lines
+ }
+ }
+
+ Timber.d("Progress monitoring loop ended - cancelled: ${downloadInfo.isCancelled()}, progress: ${downloadInfo.getProgress()}")
+ process?.destroyForcibly() // Use destroyForcibly for more aggressive termination
+ Timber.d("Logcat process destroyed forcibly")
+ } catch (e: CancellationException) {
+ Timber.d("GOGDL progress monitoring cancelled")
+ process?.destroyForcibly()
+ throw e
+ } catch (e: Exception) {
+ Timber.w(e, "Error monitoring GOGDL progress, falling back to estimation")
+ // Simple fallback - just wait and set progress to completion
+ var lastProgress = 0.0f
+ val startTime = System.currentTimeMillis()
+
+ while (downloadInfo.getProgress() < 1.0f && downloadInfo.getProgress() >= 0.0f && !downloadInfo.isCancelled()) {
+ delay(2000L)
+ val elapsed = System.currentTimeMillis() - startTime
+ val estimatedProgress = when {
+ elapsed < 5000 -> 0.05f
+ elapsed < 15000 -> 0.20f
+ elapsed < 30000 -> 0.50f
+ elapsed < 60000 -> 0.80f
+ else -> 0.90f
+ }.coerceAtLeast(lastProgress)
+
+ if (estimatedProgress > lastProgress) {
+ downloadInfo.setProgress(estimatedProgress)
+ lastProgress = estimatedProgress
+ }
+ }
+ }
+ }
+
+ /**
+ * Parse GOGDL progress components from log line using Heroic Games Launcher approach
+ * Collects all progress data before updating (prevents partial updates)
+ */
+ private fun parseGOGDLProgressComponents(
+ line: String,
+ onPercent: (Float) -> Unit,
+ onEta: (String) -> Unit,
+ onBytes: (String) -> Unit,
+ onDownSpeed: (Float) -> Unit,
+ onDiskSpeed: (Float) -> Unit
+ ) {
+ try {
+ // Parse progress percentage: "= Progress: 45.67 12345/67890, Running for: 00:01:23, ETA: 00:02:34"
+ val progressRegex = Regex("""= Progress: (\d+\.\d+) .+ETA: (\d\d:\d\d:\d\d)""")
+ val progressMatch = progressRegex.find(line)
+
+ if (progressMatch != null) {
+ val percent = progressMatch.groupValues[1].toFloat()
+ val eta = progressMatch.groupValues[2]
+ onPercent(percent)
+ onEta(eta)
+ return
+ }
+
+ // Parse download progress: "= Downloaded: 123.45 MiB, Written: 234.56 MiB"
+ val downloadedRegex = Regex("""= Downloaded: (\S+) MiB""")
+ val downloadedMatch = downloadedRegex.find(line)
+
+ if (downloadedMatch != null) {
+ val downloadedMB = downloadedMatch.groupValues[1]
+ onBytes("${downloadedMB}MB")
+ return
+ }
+
+ // Parse download speed: " + Download - 12.34 MiB/s (raw) / 23.45 MiB/s (decompressed)"
+ val downloadSpeedRegex = Regex(""" \+ Download\t- (\S+) MiB/s \(raw\)""")
+ val downloadSpeedMatch = downloadSpeedRegex.find(line)
+
+ if (downloadSpeedMatch != null) {
+ val downloadSpeed = downloadSpeedMatch.groupValues[1].toFloat()
+ onDownSpeed(downloadSpeed)
+ return
+ }
+
+ // Parse disk speed: " + Disk - 34.56 MiB/s (write) / 45.67 MiB/s (read)"
+ val diskSpeedRegex = Regex(""" \+ Disk\t- (\S+) MiB/s \(write\)""")
+ val diskSpeedMatch = diskSpeedRegex.find(line)
+
+ if (diskSpeedMatch != null) {
+ val diskSpeed = diskSpeedMatch.groupValues[1].toFloat()
+ onDiskSpeed(diskSpeed)
+ return
+ }
+
+ // Handle completion
+ if (line.contains("download completed") || line.contains("Download completed")) {
+ Timber.i("GOGDL: Download completed")
+ // Force 100% completion
+ onPercent(100.0f)
+ onEta("00:00:00")
+ onBytes("Complete")
+ onDownSpeed(0.0f)
+ onDiskSpeed(0.0f)
+ return
+ }
+
+ } catch (e: Exception) {
+ Timber.w(e, "Error parsing GOGDL progress line: $line")
+ }
+ }
+
+ /**
+ * Parse GOGDL progress from log line using Heroic Games Launcher patterns
+ * Works for both V1 and V2 games since they use the same ExecutingManager/ProgressBar
+ */
+ private fun parseGOGDLProgressLine(line: String, downloadInfo: DownloadInfo): Boolean {
+ try {
+ // Parse progress percentage: "= Progress: 45.67 12345/67890, Running for: 00:01:23, ETA: 00:02:34"
+ val progressRegex = Regex("""= Progress: (\d+\.\d+) """)
+ val progressMatch = progressRegex.find(line)
+
+ if (progressMatch != null) {
+ val percent = progressMatch.groupValues[1].toFloat()
+ val progress = (percent / 100.0f).coerceIn(0.0f, 1.0f)
+ downloadInfo.setProgress(progress)
+ return true
+ }
+
+ // Parse download progress: "= Downloaded: 123.45 MiB, Written: 234.56 MiB"
+ val downloadedRegex = Regex("""= Downloaded: (\S+) MiB""")
+ val downloadedMatch = downloadedRegex.find(line)
+
+ if (downloadedMatch != null) {
+ val downloadedMB = downloadedMatch.groupValues[1]
+ Timber.d("Downloaded: ${downloadedMB}MB")
+ return true
+ }
+
+ // Parse download speed: " + Download - 12.34 MiB/s (raw) / 23.45 MiB/s (decompressed)"
+ val downloadSpeedRegex = Regex(""" \+ Download\t- (\S+) MiB/s \(raw\)""")
+ val downloadSpeedMatch = downloadSpeedRegex.find(line)
+
+ if (downloadSpeedMatch != null) {
+ val downloadSpeed = downloadSpeedMatch.groupValues[1]
+ Timber.d("Download speed: ${downloadSpeed}MB/s")
+ return true
+ }
+
+ // Parse disk speed: " + Disk - 34.56 MiB/s (write) / 45.67 MiB/s (read)"
+ val diskSpeedRegex = Regex(""" \+ Disk\t- (\S+) MiB/s \(write\)""")
+ val diskSpeedMatch = diskSpeedRegex.find(line)
+
+ if (diskSpeedMatch != null) {
+ val diskSpeed = diskSpeedMatch.groupValues[1]
+ Timber.d("Disk speed: ${diskSpeed}MB/s")
+ return true
+ }
+
+ // Log other important GOGDL messages
+ if (line.contains("Starting V1 download") || line.contains("Starting V2 download")) {
+ Timber.i("GOGDL: $line")
+ return true
+ }
+
+ if (line.contains("download completed") || line.contains("Download completed")) {
+ Timber.i("GOGDL: Download completed")
+ downloadInfo.setProgress(1.0f)
+ return true
+ }
+
+ return false
+ } catch (e: Exception) {
+ Timber.w(e, "Error parsing GOGDL progress line: $line")
+ return false
+ }
+ }
+
+ /**
+ * Parse both V1Manager and V2Manager progress from log lines (Heroic approach)
+ */
+ private fun parseGOGDLProgress(line: String, downloadInfo: DownloadInfo) {
+ try {
+ // Parse V1Manager progress: "[V1Manager] INFO: Completed 12/16: filename"
+ val v1ProgressRegex = Regex("""\[V1Manager\] INFO: Completed\s+(\d+)/(\d+):\s+(.+)""")
+ val v1Match = v1ProgressRegex.find(line)
+
+ if (v1Match != null) {
+ val completed = v1Match.groupValues[1].toInt()
+ val total = v1Match.groupValues[2].toInt()
+ val filename = v1Match.groupValues[3]
+
+ val progress = (completed.toFloat() / total.toFloat()).coerceIn(0.0f, 1.0f)
+
+ downloadInfo.setProgress(progress)
+ Timber.i("V1 Progress: $completed/$total files (${(progress * 100).toInt()}%) - $filename")
+ return
+ }
+
+ // Parse V2Manager progress: "[V2Manager] INFO: Downloading file: filename.exe"
+ val v2FileRegex = Regex("""\[V2Manager\] INFO: Downloading file:\s+(.+)""")
+ val v2FileMatch = v2FileRegex.find(line)
+
+ if (v2FileMatch != null) {
+ val filename = v2FileMatch.groupValues[1]
+ // For V2, we don't have total file count, so use incremental progress
+ val currentProgress = downloadInfo.getProgress()
+ val increment = 0.05f // 5% per file
+ val newProgress = (currentProgress + increment).coerceAtMost(0.95f)
+
+ downloadInfo.setProgress(newProgress)
+ Timber.i("V2 Progress: Downloading $filename (${(newProgress * 100).toInt()}%)")
+ return
+ }
+
+ // Parse V2Manager chunk progress: "[V2Manager] INFO: Downloading chunk 3/5 for filename.exe"
+ val v2ChunkRegex = Regex("""\[V2Manager\] INFO: Downloading chunk\s+(\d+)/(\d+)\s+for\s+(.+)""")
+ val v2ChunkMatch = v2ChunkRegex.find(line)
+
+ if (v2ChunkMatch != null) {
+ val currentChunk = v2ChunkMatch.groupValues[1].toInt()
+ val totalChunks = v2ChunkMatch.groupValues[2].toInt()
+ val filename = v2ChunkMatch.groupValues[3]
+
+ // For chunk progress, add smaller increments
+ val currentProgress = downloadInfo.getProgress()
+ val chunkIncrement = 0.01f // 1% per chunk
+ val newProgress = (currentProgress + chunkIncrement).coerceAtMost(0.95f)
+
+ downloadInfo.setProgress(newProgress)
+ Timber.d("V2 Chunk Progress: $currentChunk/$totalChunks for $filename (${(newProgress * 100).toInt()}%)")
+ return
+ }
+
+ // Parse V2Manager depot info: "[V2Manager] INFO: Depot contains 25 files"
+ val v2DepotRegex = Regex("""\[V2Manager\] INFO: Depot contains\s+(\d+)\s+files""")
+ val v2DepotMatch = v2DepotRegex.find(line)
+
+ if (v2DepotMatch != null) {
+ val totalFiles = v2DepotMatch.groupValues[1].toInt()
+ Timber.i("V2 Download: Depot contains $totalFiles files")
+ // Set initial progress
+ downloadInfo.setProgress(0.05f)
+ return
+ }
+
+ // Check for completion (both V1 and V2)
+ if ((line.contains("All") && line.contains("files downloaded successfully")) ||
+ line.contains("Download completed successfully") ||
+ line.contains("Installation completed")
+ ) {
+ downloadInfo.setProgress(1.0f)
+ Timber.i("Download completed successfully")
+ return
+ }
+
+ // Check for errors (both V1 and V2)
+ if (line.contains("ERROR") || line.contains("Failed")) {
+ Timber.w("Download error detected: $line")
+ return
+ }
+ } catch (e: Exception) {
+ Timber.w("Error parsing progress: ${e.message}")
+ }
+ }
+
+ /**
+ * Calculate the total size of all files in a directory
+ */
+ private fun calculateDirectorySize(directory: File): Long {
+ var size = 0L
+ try {
+ directory.walkTopDown().forEach { file ->
+ if (file.isFile) {
+ size += file.length()
+ }
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Error calculating directory size")
+ }
+ return size
+ }
+
+ /**
+ * Sync GOG cloud saves for a game
+ */
+ suspend fun syncCloudSaves(gameId: String, savePath: String, authConfigPath: String, timestamp: Float = 0.0f): Result<Unit> {
+ return try {
+ Timber.i("Starting GOG cloud save sync for game $gameId")
+
+ val result = executeCommand(
+ "--auth-config-path", authConfigPath,
+ "save-sync", savePath,
+ "--dirname", gameId,
+ "--timestamp", timestamp.toString(),
+ )
+
+ if (result.isSuccess) {
+ Timber.i("GOG cloud save sync completed successfully for game $gameId")
+ Result.success(Unit)
+ } else {
+ val error = result.exceptionOrNull() ?: Exception("Save sync failed")
+ Timber.e(error, "GOG cloud save sync failed for game $gameId")
+ Result.failure(error)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "GOG cloud save sync exception for game $gameId")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Check whether stored GOG credentials exist (auth config file on disk)
+ */
+ fun hasStoredCredentials(context: Context): Boolean {
+ val authFile = File(context.filesDir, "gog_auth.json")
+ return authFile.exists()
+ }
+
+ /**
+ * Get user credentials by calling GOGDL auth command (without --code)
+ * This will automatically handle token refresh if needed
+ */
+ suspend fun getStoredCredentials(context: Context): Result<GOGCredentials> {
+ return try {
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ if (!hasStoredCredentials(context)) {
+ return Result.failure(Exception("No stored credentials found"))
+ }
+
+ // Use GOGDL to get credentials - this will handle token refresh automatically
+ val result = executeCommand("--auth-config-path", authConfigPath, "auth")
+
+ if (result.isSuccess) {
+ val output = result.getOrNull() ?: ""
+ Timber.d("GOGDL credentials output: $output")
+
+ try {
+ val credentialsJson = JSONObject(output.trim())
+
+ // Check if there's an error
+ if (credentialsJson.has("error") && credentialsJson.getBoolean("error")) {
+ val errorMsg = credentialsJson.optString("message", "Authentication failed")
+ Timber.e("GOGDL credentials failed: $errorMsg")
+ return Result.failure(Exception("Authentication failed: $errorMsg"))
+ }
+
+ // Extract credentials from GOGDL response
+ val accessToken = credentialsJson.optString("access_token", "")
+ val refreshToken = credentialsJson.optString("refresh_token", "")
+ val username = credentialsJson.optString("username", "GOG User")
+ val userId = credentialsJson.optString("user_id", "")
+
+ val credentials = GOGCredentials(
+ accessToken = accessToken,
+ refreshToken = refreshToken,
+ username = username,
+ userId = userId,
+ )
+
+ Timber.d("Got credentials for user: $username")
+ Result.success(credentials)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to parse GOGDL credentials response")
+ Result.failure(e)
+ }
+ } else {
+ Timber.e("GOGDL credentials command failed")
+ Result.failure(Exception("Failed to get credentials from GOG"))
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to get stored credentials via GOGDL")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Validate credentials by calling GOGDL auth command (without --code)
+ * This will automatically refresh tokens if they're expired
+ */
+ suspend fun validateCredentials(context: Context): Result<Boolean> {
+ return try {
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ if (!hasStoredCredentials(context)) {
+ Timber.d("No stored credentials found for validation")
+ return Result.success(false)
+ }
+
+ Timber.d("Starting credentials validation with GOGDL")
+
+ // Use GOGDL to get credentials - this will handle token refresh automatically
+ val result = executeCommand("--auth-config-path", authConfigPath, "auth")
+
+ if (!result.isSuccess) {
+ val error = result.exceptionOrNull()
+ Timber.e("Credentials validation failed - command failed: ${error?.message}")
+ return Result.success(false)
+ }
+
+ val output = result.getOrNull() ?: ""
+ Timber.d("GOGDL validation output: $output")
+
+ try {
+ val credentialsJson = JSONObject(output.trim())
+
+ // Check if there's an error
+ if (credentialsJson.has("error") && credentialsJson.getBoolean("error")) {
+ val errorDesc = credentialsJson.optString("message", "Unknown error")
+ Timber.e("Credentials validation failed: $errorDesc")
+ return Result.success(false)
+ }
+
+ Timber.d("Credentials validation successful")
+ return Result.success(true)
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to parse validation response: $output")
+ return Result.success(false)
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to validate credentials")
+ return Result.failure(e)
+ }
+ }
+
+ /**
+ * Get GOG library with progressive processing
+ * This processes games one by one as they're fetched, without making additional API calls
+ */
+ private suspend fun getLibraryProgressively(
+ authConfigPath: String,
+ onGameFetched: suspend (GOGGame) -> Unit,
+ onTotalCount: (Int) -> Unit,
+ ): Result<Int> {
+ return try {
+ Timber.i("Getting GOG library progressively...")
+
+ // Read the access token and user ID from the auth config file
+ val credentialsResult = readAuthCredentials(authConfigPath)
+ if (credentialsResult.isFailure) {
+ return Result.failure(credentialsResult.exceptionOrNull()!!)
+ }
+
+ val (accessToken, userId) = credentialsResult.getOrThrow()
+
+ // Use Python requests to call GOG Galaxy API
+ val python = Python.getInstance()
+ val requests = python.getModule("requests")
+
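+ // embed.gog.com/user/data/games returns {"owned": [product IDs], ...} for the authenticated user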
+ val url = "https://embed.gog.com/user/data/games"
+
+ // Convert Kotlin Map to Python dictionary to avoid LinkedHashMap issues
+ val pyDict = python.builtins.callAttr("dict")
+ pyDict.callAttr("__setitem__", "Authorization", "Bearer $accessToken")
+ pyDict.callAttr("__setitem__", "User-Agent", "GOGGalaxyClient/2.0.45.61 (Windows_x86_64)")
+
+ Timber.d("Making GOG API request to: $url")
+ Timber.d("Request headers: Authorization=Bearer ${accessToken.take(20)}..., User-Agent=GOGGalaxyClient/2.0.45.61")
+
+ // Make the request with headers - pass as separate arguments
+ val response = requests.callAttr(
+ "get", url,
+ Kwarg("headers", pyDict),
+ Kwarg("timeout", 30),
+ )
+
+ val statusCode = response.get("status_code")?.toInt() ?: 0
+ Timber.d("GOG API response status: $statusCode")
+
+ if (statusCode == 200) {
+ val responseJson = response.callAttr("json")
+ Timber.d("GOG API response JSON: $responseJson")
+
+ // Try different ways to access the owned array
+ val ownedGames = try {
+ responseJson?.callAttr("get", "owned")
+ } catch (e: Exception) {
+ Timber.w("Failed to get owned with callAttr: ${e.message}")
+ try {
+ responseJson?.get("owned")
+ } catch (e2: Exception) {
+ Timber.w("Failed to get owned with get: ${e2.message}")
+ null
+ }
+ }
+
+ Timber.d("GOG API owned games: $ownedGames")
+
+ // Count the owned game IDs
+ val gameCount = ownedGames?.callAttr("__len__")?.toInt() ?: 0
+ Timber.i("GOG library retrieved: $gameCount game IDs found")
+
+ // Notify total count first
+ onTotalCount(gameCount)
+
+ // Convert Python list to Kotlin list of game IDs and process them progressively
+ var processedCount = 0
+ if (ownedGames != null && gameCount > 0) {
+ for (i in 0 until gameCount) {
+ try {
+ val gameId = ownedGames.callAttr("__getitem__", i)?.toString()
+ if (gameId != null) {
+ // Fetch details for this specific game
+ val gameDetails = fetchGameDetails(gameId, accessToken)
+ if (gameDetails != null) {
+ onGameFetched(gameDetails)
+ processedCount++
+
+ // Small delay to allow UI updates
+ kotlinx.coroutines.delay(10)
+ }
+ }
+ } catch (e: Exception) {
+ Timber.w("Failed to process game at index $i: ${e.message}")
+ }
+ }
+ }
+
+ Timber.i("Successfully processed $processedCount games progressively")
+ Result.success(processedCount)
+ } else {
+ val errorText = response.callAttr("text")?.toString() ?: "Unknown error"
+ Timber.e("GOG API error: HTTP $statusCode - $errorText")
+ Result.failure(Exception("Failed to get library: HTTP $statusCode"))
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "GOG library exception")
+ Result.failure(e)
+ }
+ }
+
+ /**
+ * Get user library progressively by calling GOG Galaxy API directly
+ * This inserts games one by one as they are fetched, providing real-time updates
+ */
+ suspend fun getUserLibraryProgressively(
+ context: Context,
+ onGameFetched: suspend (GOGGame) -> Unit,
+ onTotalCount: (Int) -> Unit,
+ ): Result<Int> {
+ return try {
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+
+ if (!hasStoredCredentials(context)) {
+ return Result.failure(Exception("No stored credentials found"))
+ }
+
+ // Use the true progressive method that fetches games one by one
+ getLibraryProgressively(authConfigPath, onGameFetched, onTotalCount)
+ } catch (e: Exception) {
+ Timber.e(e, "GOG library exception")
+ Result.failure(e)
+ }
+ }
+
+ fun clearStoredCredentials(context: Context): Boolean {
+ return try {
+ val authFile = File(context.filesDir, "gog_auth.json")
+ if (authFile.exists()) {
+ authFile.delete()
+ } else {
+ true
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to clear GOG credentials")
+ false
+ }
+ }
+
+ // Active operations: a sync in progress or a running background sync job
+ fun hasActiveOperations(): Boolean {
+ return syncInProgress || backgroundSyncJob?.isActive == true
+ }
+
+ // Add methods to control sync state
+ private fun setSyncInProgress(inProgress: Boolean) {
+ syncInProgress = inProgress
+ }
+
+ fun isSyncInProgress(): Boolean = syncInProgress
+
+ fun getInstance(): GOGService? = instance
+
+ /**
+ * Check if any download is currently active
+ */
+ fun hasActiveDownload(): Boolean {
+ return getInstance()?.activeDownloads?.isNotEmpty() ?: false
+ }
+
+ /**
+ * Get the currently downloading game ID (for error messages)
+ */
+ fun getCurrentlyDownloadingGame(): String? {
+ return getInstance()?.activeDownloads?.keys?.firstOrNull()
+ }
+
+ /**
+ * Get download info for a specific game
+ */
+ fun getDownloadInfo(gameId: String): DownloadInfo? {
+ return getInstance()?.activeDownloads?.get(gameId)
+ }
+
+
+ /**
+ * Clean up active download when game is deleted
+ */
+ fun cleanupDownload(gameId: String) {
+ getInstance()?.activeDownloads?.remove(gameId)
+ }
+
+ /**
+ * Cancel an active download for a specific game
+ */
+ fun cancelDownload(gameId: String): Boolean {
+ val instance = getInstance()
+ val downloadInfo = instance?.activeDownloads?.get(gameId)
+
+ return if (downloadInfo != null) {
+ Timber.i("Cancelling download for game: $gameId")
+
+ try {
+ // Signal Python to cancel the download
+ val gameIdNum = ContainerUtils.extractGameIdFromContainerId(gameId)
+ val python = Python.getInstance()
+ val builtins = python.getModule("builtins")
+ builtins.put("GOGDL_CANCEL_${gameIdNum}", true)
+ Timber.i("Set Python cancellation flag for game: $gameIdNum")
+
+ // Verify the flag was set
+ val flagValue = builtins.get("GOGDL_CANCEL_${gameIdNum}")
+ Timber.i("Verified Python cancellation flag value: $flagValue")
+
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to set Python cancellation flag")
+ }
+
+ // Cancel the Kotlin coroutine
+ downloadInfo.cancel()
+ Timber.d("Cancelled download job and progress monitor job for game: $gameId")
+
+ // Clean up immediately
+ instance.activeDownloads.remove(gameId)
+ Timber.d("Removed game from active downloads: $gameId")
+ true
+ } else {
+ Timber.w("No active download found for game: $gameId")
+ false
+ }
+ }
+
+ /**
+ * Get download and install size information using gogdl info command
+ * Uses the same CLI pattern as existing download methods
+ */
+ suspend fun getGameSizeInfo(gameId: String): GameSizeInfo? = withContext(Dispatchers.IO) {
+ try {
+ val authConfigPath = "/data/data/app.gamenative/files/gog_config.json"
+
+ Timber.d("Getting size info for GOG game: $gameId")
+
+ // Use the same executeCommand pattern as existing methods
+ val result = executeCommand("--auth-config-path", authConfigPath, "info", gameId, "--platform", "windows")
+
+ if (result.isSuccess) {
+ val output = result.getOrNull() ?: ""
+ Timber.d("Got gogdl info output: $output")
+
+ if (output.isNotEmpty()) {
+ try {
+ // Parse JSON output from gogdl info command
+ val jsonResponse = JSONObject(output.trim())
+
+ // Debug: Log the full JSON structure
+ Timber.d("Full gogdl info JSON response: $output")
+
+ // Extract size information from the JSON response
+ val sizeInfo = jsonResponse.optJSONObject("size")
+ Timber.d("Size info object: $sizeInfo")
+
+ var maxDownloadSize = 0L
+ var maxDiskSize = 0L
+
+ if (sizeInfo != null) {
+ // Iterate through all language keys to find the largest size
+ val keys = sizeInfo.keys()
+ while (keys.hasNext()) {
+ val key = keys.next()
+ val languageSize = sizeInfo.optJSONObject(key)
+ if (languageSize != null) {
+ val downloadSize = languageSize.optLong("download_size", 0L)
+ val diskSize = languageSize.optLong("disk_size", 0L)
+
+ Timber.d("Language '$key' sizes - Download: $downloadSize bytes, Disk: $diskSize bytes")
+
+ // Keep track of the largest sizes (usually the full game language pack)
+ if (downloadSize > maxDownloadSize) {
+ maxDownloadSize = downloadSize
+ }
+ if (diskSize > maxDiskSize) {
+ maxDiskSize = diskSize
+ }
+ }
+ }
+ }
+
+ Timber.d("Final max sizes - Download: $maxDownloadSize bytes, Disk: $maxDiskSize bytes")
+
+ if (maxDownloadSize > 0 || maxDiskSize > 0) {
+ Timber.d("Got size info for $gameId - Download: ${app.gamenative.utils.StorageUtils.formatBinarySize(maxDownloadSize)}, Disk: ${app.gamenative.utils.StorageUtils.formatBinarySize(maxDiskSize)}")
+ return@withContext GameSizeInfo(maxDownloadSize, maxDiskSize)
+ }
+ } catch (e: Exception) {
+ Timber.w(e, "Failed to parse gogdl info JSON output")
+ }
+ }
+ } else {
+ Timber.w("GOGDL info command failed: ${result.exceptionOrNull()?.message}")
+ }
+
+ return@withContext null
+ } catch (e: Exception) {
+ Timber.w(e, "Failed to get size info for game $gameId")
+ return@withContext null
+ }
+ }
+ }
+
+ // Add these for foreground service support
+ private lateinit var notificationHelper: NotificationHelper
+
+ @Inject
+ lateinit var gogLibraryManager: GOGLibraryManager
+
+ private val scope = CoroutineScope(Dispatchers.IO + SupervisorJob())
+
+ // Track active downloads by game ID
+ private val activeDownloads = ConcurrentHashMap<String, DownloadInfo>()
+
+ override fun onCreate() {
+ super.onCreate()
+ instance = this
+
+ // Initialize notification helper for foreground service
+ notificationHelper = NotificationHelper(applicationContext)
+ }
+
+ override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
+ // Start as foreground service
+ val notification = notificationHelper.createForegroundNotification("GOG Service running...")
+ startForeground(2, notification) // Use different ID than SteamService (which uses 1)
+
+ // Start background library sync automatically when service starts with tracking
+ backgroundSyncJob = scope.launch {
+ try {
+ setSyncInProgress(true)
+ Timber.d("[GOGService]: Starting background library sync")
+
+ val syncResult = gogLibraryManager.startBackgroundSync(applicationContext)
+ if (syncResult.isFailure) {
+ Timber.w("[GOGService]: Failed to start background sync: ${syncResult.exceptionOrNull()?.message}")
+ } else {
+ Timber.i("[GOGService]: Background library sync started successfully")
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "[GOGService]: Exception starting background sync")
+ } finally {
+ setSyncInProgress(false)
+ }
+ }
+
+ return START_STICKY
+ }
+
+ override fun onDestroy() {
+ super.onDestroy()
+
+ // Cancel sync operations
+ backgroundSyncJob?.cancel()
+ setSyncInProgress(false)
+
+ scope.cancel() // Cancel any ongoing operations
+ stopForeground(STOP_FOREGROUND_REMOVE)
+ notificationHelper.cancel()
+ instance = null
+ }
+
+ override fun onBind(intent: Intent?): IBinder? = null
+}
diff --git a/app/src/main/java/app/gamenative/service/GameManager.kt b/app/src/main/java/app/gamenative/service/GameManager.kt
index 05ef84df8..8f5cd5431 100644
--- a/app/src/main/java/app/gamenative/service/GameManager.kt
+++ b/app/src/main/java/app/gamenative/service/GameManager.kt
@@ -59,7 +59,7 @@ interface GameManager {
/**
* Get the download size for a game
*/
- fun getDownloadSize(libraryItem: LibraryItem): String
+ suspend fun getDownloadSize(libraryItem: LibraryItem): String
/**
* Check if a game is valid to download
@@ -116,6 +116,12 @@ interface GameManager {
*/
fun getHeroImage(libraryItem: LibraryItem): String
+
+ /**
+ * Get the icon image for the given game
+ */
+ fun getIconImage(libraryItem: LibraryItem): String
+
/**
* Returns the install info dialog for the given game
*/
diff --git a/app/src/main/java/app/gamenative/service/GameManagerService.kt b/app/src/main/java/app/gamenative/service/GameManagerService.kt
index 06cfdf785..d9d59a368 100644
--- a/app/src/main/java/app/gamenative/service/GameManagerService.kt
+++ b/app/src/main/java/app/gamenative/service/GameManagerService.kt
@@ -9,6 +9,7 @@ import app.gamenative.data.LaunchInfo
import app.gamenative.data.LibraryItem
import app.gamenative.data.PostSyncInfo
import app.gamenative.data.SteamApp
+import app.gamenative.service.GOG.GOGGameManager
import app.gamenative.service.Steam.SteamGameManager
import app.gamenative.ui.component.dialog.state.MessageDialogState
import app.gamenative.utils.ContainerUtils
@@ -30,6 +31,7 @@ import timber.log.Timber
@Singleton
class GameManagerService @Inject constructor(
private val steamGameManager: SteamGameManager,
+ private val gogGameManager: GOGGameManager,
// Add new game sources here
) {
companion object {
@@ -48,6 +50,7 @@ class GameManagerService @Inject constructor(
// Set up default game managers using the real steamGameManager
gameManagers = mapOf(
GameSource.STEAM to serviceInstance.steamGameManager,
+ GameSource.GOG to serviceInstance.gogGameManager,
// Add new game sources here
)
}
@@ -91,6 +94,10 @@ class GameManagerService @Inject constructor(
fun downloadGame(context: Context, libraryItem: LibraryItem): DownloadInfo? {
return getManagerForGame(libraryItem).downloadGame(context, libraryItem).getOrNull()
}
+
+ fun downloadGameWithResult(context: Context, libraryItem: LibraryItem): Result<DownloadInfo> {
+ return getManagerForGame(libraryItem).downloadGame(context, libraryItem)
+ }
fun hasPartialDownload(libraryItem: LibraryItem): Boolean {
return getManagerForGame(libraryItem).hasPartialDownload(libraryItem)
@@ -150,7 +157,7 @@ class GameManagerService @Inject constructor(
* We may need to quickly get the container name in places that aren't using LibraryItem yet
*/
fun getAppId(gameId: Int, gameSource: GameSource): String {
- return gameSource.name+"_"+gameId
+ return gameSource.name + "_" + gameId
}
/**
@@ -172,7 +179,7 @@ class GameManagerService @Inject constructor(
return getManagerForGameSource(gameSource).createLibraryItem(appId, gameId.toString(), context)
}
- fun getDownloadSize(libraryItem: LibraryItem): String {
+ suspend fun getDownloadSize(libraryItem: LibraryItem): String {
return getManagerForGame(libraryItem).getDownloadSize(libraryItem)
}
@@ -192,6 +199,10 @@ class GameManagerService @Inject constructor(
return getManagerForGame(libraryItem).getHeroImage(libraryItem)
}
+ fun getIconImage(libraryItem: LibraryItem): String {
+ return getManagerForGame(libraryItem).getIconImage(libraryItem)
+ }
+
fun getInstallInfoDialog(context: Context, libraryItem: LibraryItem): MessageDialogState {
return getManagerForGame(libraryItem).getInstallInfoDialog(context, libraryItem)
}
diff --git a/app/src/main/java/app/gamenative/service/Steam/SteamGameManager.kt b/app/src/main/java/app/gamenative/service/Steam/SteamGameManager.kt
index 9e69d73e5..bc2d55e48 100644
--- a/app/src/main/java/app/gamenative/service/Steam/SteamGameManager.kt
+++ b/app/src/main/java/app/gamenative/service/Steam/SteamGameManager.kt
@@ -3,6 +3,7 @@ package app.gamenative.service.Steam
import android.content.Context
import android.net.Uri
import androidx.core.net.toUri
+import app.gamenative.Constants
import app.gamenative.R
import app.gamenative.data.DownloadInfo
import app.gamenative.data.Game
@@ -220,8 +221,10 @@ class SteamGameManager @Inject constructor(
)
}
- override fun getDownloadSize(libraryItem: LibraryItem): String {
- return DownloadService.getSizeFromStoreDisplay(libraryItem.gameId)
+ override suspend fun getDownloadSize(libraryItem: LibraryItem): String {
+ return withContext(Dispatchers.IO) {
+ DownloadService.getSizeFromStoreDisplay(libraryItem.gameId)
+ }
}
override fun isValidToDownload(libraryItem: LibraryItem): Boolean {
@@ -247,6 +250,10 @@ class SteamGameManager @Inject constructor(
return appInfo?.getHeroUrl() ?: ""
}
+ override fun getIconImage(libraryItem: LibraryItem): String {
+ return Constants.Library.ICON_URL + "${libraryItem.gameId}/${libraryItem.iconHash}.ico"
+ }
+
override fun getInstallInfoDialog(context: Context, libraryItem: LibraryItem): MessageDialogState {
val depots = SteamService.getDownloadableDepots(libraryItem.gameId)
Timber.i("There are ${depots.size} depots belonging to ${libraryItem.gameId}")
diff --git a/app/src/main/java/app/gamenative/ui/PluviaMain.kt b/app/src/main/java/app/gamenative/ui/PluviaMain.kt
index 2d8ab5227..b693955ab 100644
--- a/app/src/main/java/app/gamenative/ui/PluviaMain.kt
+++ b/app/src/main/java/app/gamenative/ui/PluviaMain.kt
@@ -2,6 +2,7 @@ package app.gamenative.ui
import android.content.Context
import android.content.Intent
+import android.widget.Toast
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.Column
@@ -24,7 +25,6 @@ import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.platform.LocalUriHandler
import androidx.compose.ui.unit.dp
import androidx.compose.ui.zIndex
-import android.widget.Toast
import androidx.hilt.navigation.compose.hiltViewModel
import androidx.lifecycle.Lifecycle
import androidx.lifecycle.LifecycleOwner
@@ -52,6 +52,7 @@ import app.gamenative.enums.LoginResult
import app.gamenative.enums.SaveLocation
import app.gamenative.enums.SyncResult
import app.gamenative.events.AndroidEvent
+import app.gamenative.service.GOG.GOGService
import app.gamenative.service.GameManagerService
import app.gamenative.service.SteamService
import app.gamenative.ui.component.dialog.GameFeedbackDialog
@@ -85,7 +86,6 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import timber.log.Timber
-import io.ktor.client.plugins.HttpTimeout
@Composable
fun PluviaMain(
@@ -309,7 +309,7 @@ fun PluviaMain(
is MainViewModel.MainUiEvent.ShowGameFeedbackDialog -> {
gameFeedbackState = GameFeedbackDialogState(
visible = true,
- appId = event.appId
+ appId = event.appId,
)
}
@@ -370,6 +370,11 @@ fun PluviaMain(
if (SteamService.isLoggedIn && state.currentScreen == PluviaScreen.LoginUser) {
navController.navigate(PluviaScreen.Home.route)
}
+
+ if (GOGService.hasStoredCredentials(context) && !GOGService.isRunning) {
+ Timber.d("[PluviaMain]: GOG credentials found - starting GOG service")
+ GOGService.start(context)
+ }
}
}
@@ -398,7 +403,7 @@ fun PluviaMain(
val onShowGameFeedback: (AndroidEvent.ShowGameFeedback) -> Unit = { event ->
gameFeedbackState = GameFeedbackDialogState(
visible = true,
- appId = event.appId
+ appId = event.appId,
)
}
@@ -681,7 +686,7 @@ fun PluviaMain(
appId = appId,
rating = feedbackState.rating,
tags = feedbackState.selectedTags.toList(),
- notes = feedbackState.feedbackText.takeIf { it.isNotBlank() }
+ notes = feedbackState.feedbackText.takeIf { it.isNotBlank() },
)
Timber.d("GameFeedback: Submission returned $result")
@@ -711,7 +716,7 @@ fun PluviaMain(
},
onDiscordSupport = {
uriHandler.openUri("https://discord.gg/2hKv4VfZfE")
- }
+ },
)
Box(modifier = Modifier.zIndex(10f)) {
@@ -955,7 +960,7 @@ fun preLaunchApp(
title = context.getString(R.string.sync_error_title),
message = "Failed to sync save files: ${postSyncInfo.syncResult}. Continuing can cause sync conflicts and lost data.\n\nYOU MAY LOSE SAVE DATA!",
dismissBtnText = "Cancel",
- confirmBtnText = "Launch anyway"
+ confirmBtnText = "Launch anyway",
),
)
}
diff --git a/app/src/main/java/app/gamenative/ui/enums/AppFilter.kt b/app/src/main/java/app/gamenative/ui/enums/AppFilter.kt
index e891f2acf..02eef7d55 100644
--- a/app/src/main/java/app/gamenative/ui/enums/AppFilter.kt
+++ b/app/src/main/java/app/gamenative/ui/enums/AppFilter.kt
@@ -5,7 +5,9 @@ import androidx.compose.material.icons.filled.AvTimer
import androidx.compose.material.icons.filled.Build
import androidx.compose.material.icons.filled.Computer
import androidx.compose.material.icons.filled.Diversity3
+import androidx.compose.material.icons.filled.Games
import androidx.compose.material.icons.filled.InstallMobile
+import androidx.compose.material.icons.filled.LibraryBooks
import androidx.compose.material.icons.filled.VideogameAsset
import androidx.compose.ui.graphics.vector.ImageVector
import app.gamenative.enums.AppType
@@ -46,6 +48,16 @@ enum class AppFilter(
displayText = "Family",
icon = Icons.Default.Diversity3,
),
+ STEAM(
+ code = 0x40,
+ displayText = "Steam",
+ icon = Icons.Default.Games,
+ ),
+ GOG(
+ code = 0x80,
+ displayText = "GOG",
+ icon = Icons.Default.LibraryBooks,
+ ),
// ALPHABETIC(
// code = 0x20,
// displayText = "Alphabetic",
diff --git a/app/src/main/java/app/gamenative/ui/internal/FakeGameManager.kt b/app/src/main/java/app/gamenative/ui/internal/FakeGameManager.kt
index 7fcae1cbd..8e8a41383 100644
--- a/app/src/main/java/app/gamenative/ui/internal/FakeGameManager.kt
+++ b/app/src/main/java/app/gamenative/ui/internal/FakeGameManager.kt
@@ -66,7 +66,7 @@ object FakeGameManager : GameManager {
)
}
- override fun getDownloadSize(libraryItem: LibraryItem): String = "1.5 GB"
+ override suspend fun getDownloadSize(libraryItem: LibraryItem): String = "1.5 GB"
override fun isValidToDownload(library: LibraryItem): Boolean = true
override fun getAppInfo(libraryItem: LibraryItem): SteamApp? = null
override fun getAppDirPath(appId: String): String = "/path/to/fake/app/dir"
@@ -95,6 +95,7 @@ object FakeGameManager : GameManager {
override fun getReleaseDate(libraryItem: LibraryItem): String = "2024-01-01"
override fun getHeroImage(libraryItem: LibraryItem): String = ""
+ override fun getIconImage(libraryItem: LibraryItem): String = ""
override fun getInstallInfoDialog(context: Context, libraryItem: LibraryItem): MessageDialogState {
return MessageDialogState(
diff --git a/app/src/main/java/app/gamenative/ui/model/AccountManagementViewModel.kt b/app/src/main/java/app/gamenative/ui/model/AccountManagementViewModel.kt
new file mode 100644
index 000000000..7611130b4
--- /dev/null
+++ b/app/src/main/java/app/gamenative/ui/model/AccountManagementViewModel.kt
@@ -0,0 +1,41 @@
+package app.gamenative.ui.model
+
+import android.content.Context
+import androidx.lifecycle.ViewModel
+import androidx.lifecycle.viewModelScope
+import app.gamenative.service.GOG.GOGLibraryManager
+import dagger.hilt.android.lifecycle.HiltViewModel
+import javax.inject.Inject
+import kotlinx.coroutines.launch
+import timber.log.Timber
+
+@HiltViewModel
+class AccountManagementViewModel @Inject constructor(
+ private val gogLibraryManager: GOGLibraryManager,
+) : ViewModel() {
+ fun syncGOGLibraryAsync(context: Context, clearExisting: Boolean = true, onResult: (Result<Int>) -> Unit) {
+ viewModelScope.launch {
+ try {
+ // Clear existing games and start background sync
+ if (clearExisting) {
+ gogLibraryManager.clearLibrary()
+ }
+
+ // Start background sync and check if it was successful
+ val syncStartResult = gogLibraryManager.startBackgroundSync(context, clearExisting)
+
+ if (syncStartResult.isSuccess) {
+ // Sync started successfully, return current game count
+ val gameCount = gogLibraryManager.getLocalGameCount()
+ onResult(Result.success(gameCount))
+ } else {
+ // Sync failed to start, return the error
+ onResult(Result.failure(syncStartResult.exceptionOrNull() ?: Exception("Failed to start sync")))
+ }
+ } catch (e: Exception) {
+ Timber.e(e, "Exception during GOG sync start")
+ onResult(Result.failure(e))
+ }
+ }
+ }
+}
diff --git a/app/src/main/java/app/gamenative/ui/model/LibraryViewModel.kt b/app/src/main/java/app/gamenative/ui/model/LibraryViewModel.kt
index 94f0432a5..3c818b480 100644
--- a/app/src/main/java/app/gamenative/ui/model/LibraryViewModel.kt
+++ b/app/src/main/java/app/gamenative/ui/model/LibraryViewModel.kt
@@ -8,6 +8,7 @@ import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import app.gamenative.PrefManager
import app.gamenative.data.Game
+import app.gamenative.data.GameSource
import app.gamenative.service.GameManagerService
import app.gamenative.ui.data.LibraryState
import app.gamenative.ui.enums.AppFilter
@@ -101,6 +102,13 @@ class LibraryViewModel @Inject constructor() : ViewModel() {
val filteredGames = allGames
.asSequence()
+ .filter { game ->
+ when {
+ currentState.appInfoSortType.contains(AppFilter.STEAM) -> game.source == GameSource.STEAM
+ currentState.appInfoSortType.contains(AppFilter.GOG) -> game.source == GameSource.GOG
+ else -> true
+ }
+ }
.filter { item ->
if (currentState.appInfoSortType.contains(AppFilter.SHARED)) {
true
diff --git a/app/src/main/java/app/gamenative/ui/screen/accounts/AccountManagementScreen.kt b/app/src/main/java/app/gamenative/ui/screen/accounts/AccountManagementScreen.kt
index 69796f511..8ded49715 100644
--- a/app/src/main/java/app/gamenative/ui/screen/accounts/AccountManagementScreen.kt
+++ b/app/src/main/java/app/gamenative/ui/screen/accounts/AccountManagementScreen.kt
@@ -17,7 +17,9 @@ import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
+import androidx.hilt.navigation.compose.hiltViewModel
import app.gamenative.ui.component.topbar.BackButton
+import app.gamenative.ui.model.AccountManagementViewModel
import app.gamenative.ui.theme.PluviaTheme
import com.alorma.compose.settings.ui.SettingsGroup
import com.skydoves.landscapist.ImageOptions
@@ -29,6 +31,7 @@ fun AccountManagementScreen(
onNavigateRoute: (String) -> Unit,
onBack: () -> Unit,
modifier: Modifier = Modifier,
+ viewModel: AccountManagementViewModel = hiltViewModel(),
) {
val snackBarHostState = remember { SnackbarHostState() }
val scrollState = rememberScrollState()
@@ -51,7 +54,7 @@ fun AccountManagementScreen(
.fillMaxSize()
.verticalScroll(scrollState),
) {
- AccountsGroup(onNavigateRoute = onNavigateRoute)
+ AccountsGroup(onNavigateRoute = onNavigateRoute, viewModel = viewModel)
}
}
}
@@ -59,9 +62,11 @@ fun AccountManagementScreen(
@Composable
private fun AccountsGroup(
onNavigateRoute: (String) -> Unit,
+ viewModel: AccountManagementViewModel,
) {
SettingsGroup(title = { Text(text = "Accounts") }) {
SteamAccountSection(onNavigateRoute = onNavigateRoute)
+ GOGAccountSection(viewModel = viewModel)
// Other account sections (GOG, Epic Games, etc.)
}
}
diff --git a/app/src/main/java/app/gamenative/ui/screen/accounts/GOGAccountSection.kt b/app/src/main/java/app/gamenative/ui/screen/accounts/GOGAccountSection.kt
new file mode 100644
index 000000000..9f45593ba
--- /dev/null
+++ b/app/src/main/java/app/gamenative/ui/screen/accounts/GOGAccountSection.kt
@@ -0,0 +1,154 @@
+package app.gamenative.ui.screen.accounts
+
+import android.content.Intent
+import androidx.activity.compose.rememberLauncherForActivityResult
+import androidx.activity.result.contract.ActivityResultContracts
+import androidx.compose.foundation.layout.*
+import androidx.compose.material3.*
+import androidx.compose.runtime.*
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.platform.LocalContext
+import androidx.compose.ui.unit.dp
+import app.gamenative.service.GOG.GOGService
+import app.gamenative.ui.model.AccountManagementViewModel
+import app.gamenative.ui.screen.auth.GOGOAuthActivity
+import kotlinx.coroutines.launch
+import timber.log.Timber
+
+@Composable
+fun GOGAccountSection(
+ viewModel: AccountManagementViewModel,
+ modifier: Modifier = Modifier,
+) {
+ val context = LocalContext.current
+ val scope = rememberCoroutineScope()
+
+ // State for GOG
+ var isGOGLoggedIn by remember { mutableStateOf(false) }
+ var gogUsername by remember { mutableStateOf("") }
+ var gogAuthInProgress by remember { mutableStateOf(false) }
+ var gogError by remember { mutableStateOf<String?>(null) }
+
+ // Check for existing GOG credentials on startup
+ LaunchedEffect(Unit) {
+ if (GOGService.hasStoredCredentials(context)) {
+ // Use GOGDL to validate credentials (this handles token refresh automatically)
+ val validationResult = GOGService.validateCredentials(context)
+
+ if (validationResult.isSuccess && validationResult.getOrThrow()) {
+ // Credentials are valid, get user info
+ val credentialsResult = GOGService.getStoredCredentials(context)
+ if (credentialsResult.isSuccess) {
+ val credentials = credentialsResult.getOrThrow()
+ isGOGLoggedIn = true
+ gogUsername = credentials.username
+ gogError = null
+ } else {
+ gogError = "Failed to get user info: ${credentialsResult.exceptionOrNull()?.message}"
+ isGOGLoggedIn = false
+ gogUsername = ""
+ }
+ } else {
+ val errorMsg = if (validationResult.isFailure) {
+ "Validation failed: ${validationResult.exceptionOrNull()?.message}"
+ } else {
+ "Session expired or invalid credentials"
+ }
+ gogError = errorMsg
+ isGOGLoggedIn = false
+ gogUsername = ""
+ }
+ }
+ }
+
+ // OAuth launcher for GOG authentication
+ val gogOAuthLauncher = rememberLauncherForActivityResult(
+ contract = ActivityResultContracts.StartActivityForResult(),
+ ) { result ->
+ when (result.resultCode) {
+ android.app.Activity.RESULT_OK -> {
+ val authCode = result.data?.getStringExtra(GOGOAuthActivity.EXTRA_AUTH_CODE)
+ if (authCode != null) {
+ // Got authorization code, now authenticate with GOGDL
+ scope.launch {
+ gogAuthInProgress = true
+ gogError = null
+
+ try {
+ val authConfigPath = "${context.filesDir}/gog_auth.json"
+ val authResult = GOGService.authenticateWithCode(authConfigPath, authCode)
+
+ if (authResult.isSuccess) {
+ val credentials = authResult.getOrThrow()
+ isGOGLoggedIn = true
+ gogUsername = credentials.username
+ gogError = null
+
+ // Automatically start GOG library sync after successful login
+ Timber.i("GOG login successful, starting automatic library sync...")
+ viewModel.syncGOGLibraryAsync(context, clearExisting = true) { result ->
+ if (result.isSuccess) {
+ Timber.i("GOG library sync started successfully after login")
+ } else {
+ Timber.w("Failed to start GOG library sync after login: ${result.exceptionOrNull()?.message}")
+ }
+ }
+ } else {
+ gogError = authResult.exceptionOrNull()?.message ?: "Authentication failed"
+ }
+ } catch (e: Exception) {
+ gogError = e.message ?: "Authentication failed"
+ } finally {
+ gogAuthInProgress = false
+ }
+ }
+ } else {
+ gogError = "No authorization code received"
+ gogAuthInProgress = false
+ }
+ }
+ android.app.Activity.RESULT_CANCELED -> {
+ val error = result.data?.getStringExtra(GOGOAuthActivity.EXTRA_ERROR)
+ gogError = error ?: "Authentication cancelled"
+ gogAuthInProgress = false
+ }
+ }
+ }
+
+ Column(
+ modifier = modifier,
+ verticalArrangement = Arrangement.spacedBy(16.dp),
+ ) {
+ // GOG Account Section
+ AccountSection(
+ title = "GOG",
+ description = "Access your GOG library and DRM-free games",
+ icon = "https://www.gog.com/favicon.ico",
+ isLoggedIn = isGOGLoggedIn,
+ username = if (isGOGLoggedIn) gogUsername else null,
+ isLoading = gogAuthInProgress,
+ error = gogError,
+ onLogin = {
+ // Launch GOG OAuth activity
+ gogAuthInProgress = true
+ gogError = null
+ val intent = Intent(context, GOGOAuthActivity::class.java)
+ gogOAuthLauncher.launch(intent)
+ },
+ onLogout = {
+ scope.launch {
+ try {
+ // Clear stored credentials using the service method
+ GOGService.clearStoredCredentials(context)
+
+ isGOGLoggedIn = false
+ gogUsername = ""
+ gogError = null
+ } catch (e: Exception) {
+ gogError = "Logout error: ${e.message}"
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/app/src/main/java/app/gamenative/ui/screen/auth/GOGOAuthActivity.kt b/app/src/main/java/app/gamenative/ui/screen/auth/GOGOAuthActivity.kt
new file mode 100644
index 000000000..8203bb7ee
--- /dev/null
+++ b/app/src/main/java/app/gamenative/ui/screen/auth/GOGOAuthActivity.kt
@@ -0,0 +1,66 @@
+package app.gamenative.ui.screen.auth
+
+import android.app.Activity
+import android.content.Intent
+import android.net.Uri
+import android.os.Bundle
+import androidx.activity.ComponentActivity
+import androidx.activity.compose.setContent
+import app.gamenative.ui.component.dialog.GOGWebViewDialog
+import app.gamenative.ui.theme.PluviaTheme
+import timber.log.Timber
+
+class GOGOAuthActivity : ComponentActivity() {
+
+ companion object {
+ const val EXTRA_AUTH_CODE = "auth_code"
+ const val EXTRA_ERROR = "error"
+ const val GOG_CLIENT_ID = "46899977096215655" // TODO: we should use our own instead of Heroic's client id.
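+ // redirect_uri decodes to https://embed.gog.com/on_login_success?origin=client; the WebView callback below watches for it and extracts the "code" query parameter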
+ const val GOG_AUTH_URL = "https://auth.gog.com/auth?" +
+ "client_id=$GOG_CLIENT_ID" +
+ "&redirect_uri=https%3A%2F%2Fembed.gog.com%2Fon_login_success%3Forigin%3Dclient" +
+ "&response_type=code" +
+ "&layout=galaxy"
+ }
+
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+
+ setContent {
+ PluviaTheme {
+ GOGWebViewDialog(
+ isVisible = true,
+ url = GOG_AUTH_URL,
+ onDismissRequest = {
+ setResult(Activity.RESULT_CANCELED)
+ finish()
+ },
+ onUrlChange = { currentUrl: String ->
+ // Check if this is the GOG redirect URL with authorization code
+ if (currentUrl.contains("embed.gog.com/on_login_success")) {
+ val extractedCode = extractAuthCode(currentUrl)
+ if (extractedCode != null) {
+ Timber.d("Automatically extracted auth code from URL")
+ val resultIntent = Intent().apply {
+ putExtra(EXTRA_AUTH_CODE, extractedCode)
+ }
+ setResult(Activity.RESULT_OK, resultIntent)
+ finish()
+ }
+ }
+ },
+ )
+ }
+ }
+ }
+
+ private fun extractAuthCode(url: String): String? {
+ return try {
+ val uri = Uri.parse(url)
+ uri.getQueryParameter("code")
+ } catch (e: Exception) {
+ Timber.e(e, "Failed to extract auth code from URL: $url")
+ null
+ }
+ }
+}
diff --git a/app/src/main/java/app/gamenative/ui/screen/auth/GOGWebViewDialog.kt b/app/src/main/java/app/gamenative/ui/screen/auth/GOGWebViewDialog.kt
new file mode 100644
index 000000000..ee968719d
--- /dev/null
+++ b/app/src/main/java/app/gamenative/ui/screen/auth/GOGWebViewDialog.kt
@@ -0,0 +1,181 @@
+package app.gamenative.ui.component.dialog
+
+import android.content.res.Configuration
+import android.os.Bundle
+import android.view.ViewGroup
+import android.webkit.WebChromeClient
+import android.webkit.WebSettings
+import android.webkit.WebView
+import android.webkit.WebViewClient
+import androidx.compose.foundation.layout.padding
+import androidx.compose.material.icons.Icons
+import androidx.compose.material.icons.filled.Close
+import androidx.compose.material3.CenterAlignedTopAppBar
+import androidx.compose.material3.ExperimentalMaterial3Api
+import androidx.compose.material3.Icon
+import androidx.compose.material3.IconButton
+import androidx.compose.material3.Scaffold
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.saveable.rememberSaveable
+import androidx.compose.runtime.setValue
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.text.style.TextOverflow
+import androidx.compose.ui.tooling.preview.Preview
+import androidx.compose.ui.viewinterop.AndroidView
+import androidx.compose.ui.window.Dialog
+import androidx.compose.ui.window.DialogProperties
+import app.gamenative.ui.theme.PluviaTheme
+import timber.log.Timber
+
+@OptIn(ExperimentalMaterial3Api::class)
+@Composable
+fun GOGWebViewDialog(
+ isVisible: Boolean,
+ url: String,
+ onDismissRequest: () -> Unit,
+ onUrlChange: ((String) -> Unit)? = null,
+) {
+ if (isVisible) {
+ var topBarTitle by rememberSaveable { mutableStateOf("GOG Authentication") }
+ val startingUrl by rememberSaveable(url) { mutableStateOf(url) }
+ var webView: WebView? = remember { null }
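+ // Saved Bundle holding the WebView state; restored on creation and written back in onRelease so navigation history survives recomposition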
+ val webViewState = rememberSaveable { Bundle() }
+
+ Dialog(
+ onDismissRequest = {
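+ // Navigate the WebView history first; only dismiss (and clear the saved state) when there is no page to go back to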
+ if (webView?.canGoBack() == true) {
+ webView!!.goBack()
+ } else {
+ webViewState.clear()
+ onDismissRequest()
+ }
+ },
+ properties = DialogProperties(
+ usePlatformDefaultWidth = false,
+ dismissOnClickOutside = false,
+ ),
+ content = {
+ Scaffold(
+ topBar = {
+ CenterAlignedTopAppBar(
+ title = {
+ Text(
+ text = topBarTitle,
+ maxLines = 1,
+ overflow = TextOverflow.Ellipsis,
+ )
+ },
+ navigationIcon = {
+ IconButton(
+ onClick = {
+ webViewState.clear()
+ onDismissRequest()
+ },
+ content = { Icon(imageVector = Icons.Default.Close, null) },
+ )
+ },
+ )
+ },
+ ) { paddingValues ->
+ AndroidView(
+ modifier = Modifier.padding(paddingValues),
+ factory = { context ->
+ WebView(context).apply {
+ layoutParams = ViewGroup.LayoutParams(
+ ViewGroup.LayoutParams.MATCH_PARENT,
+ ViewGroup.LayoutParams.MATCH_PARENT,
+ )
+
+ // GOG-specific WebView settings
+ settings.apply {
+ javaScriptEnabled = true
+ domStorageEnabled = true
+ loadWithOverviewMode = true
+ useWideViewPort = true
+ builtInZoomControls = true
+ displayZoomControls = false
+ setSupportZoom(true)
+ allowFileAccess = true
+ allowContentAccess = true
+ allowFileAccessFromFileURLs = true
+ allowUniversalAccessFromFileURLs = true
+ mixedContentMode = WebSettings.MIXED_CONTENT_ALWAYS_ALLOW
+
+ // GOG-specific user agent (similar to Heroic)
+ userAgentString = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/200.0"
+ }
+
+ webViewClient = object : WebViewClient() {
+ override fun shouldOverrideUrlLoading(view: WebView?, url: String?): Boolean {
+ Timber.d("GOG WebView navigating to: $url")
+ url?.let { currentUrl ->
+ onUrlChange?.invoke(currentUrl)
+ }
+ return super.shouldOverrideUrlLoading(view, url)
+ }
+
+ override fun onPageFinished(view: WebView?, url: String?) {
+ super.onPageFinished(view, url)
+ Timber.d("GOG WebView page finished loading: $url")
+ }
+
+ override fun onReceivedError(view: WebView?, errorCode: Int, description: String?, failingUrl: String?) {
+ super.onReceivedError(view, errorCode, description, failingUrl)
+ Timber.e("GOG WebView error: $errorCode - $description for URL: $failingUrl")
+ }
+ }
+
+ webChromeClient = object : WebChromeClient() {
+ override fun onReceivedTitle(view: WebView?, title: String?) {
+ title?.let { pageTitle ->
+ topBarTitle = pageTitle
+ Timber.d("GOG WebView title: $pageTitle")
+ }
+ }
+
+ override fun onProgressChanged(view: WebView?, newProgress: Int) {
+ super.onProgressChanged(view, newProgress)
+ Timber.d("GOG WebView progress: $newProgress%")
+ }
+ }
+
+ if (webViewState.size() > 0) {
+ restoreState(webViewState)
+ } else {
+ Timber.d("Loading GOG WebView URL: $startingUrl")
+ loadUrl(startingUrl)
+ }
+ webView = this
+ }
+ },
+ update = {
+ webView = it
+ },
+ onRelease = { view ->
+ view.saveState(webViewState)
+ },
+ )
+ }
+ },
+ )
+ }
+}
+
+@Preview(uiMode = Configuration.UI_MODE_NIGHT_YES or Configuration.UI_MODE_TYPE_NORMAL)
+@Preview
+@Composable
+private fun Preview_GOGWebView() {
+ PluviaTheme {
+ GOGWebViewDialog(
+ isVisible = true,
+ url = "https://auth.gog.com/auth?client_id=46899977096215655&redirect_uri=https%3A%2F%2Fembed.gog.com%2Fon_login_success%3Forigin%3Dclient&response_type=code&layout=galaxy",
+ onDismissRequest = {
+ println("GOG WebView dismissed!")
+ },
+ )
+ }
+}
diff --git a/app/src/main/java/app/gamenative/ui/screen/library/LibraryAppScreen.kt b/app/src/main/java/app/gamenative/ui/screen/library/LibraryAppScreen.kt
index d4c406f71..d5b531ca1 100644
--- a/app/src/main/java/app/gamenative/ui/screen/library/LibraryAppScreen.kt
+++ b/app/src/main/java/app/gamenative/ui/screen/library/LibraryAppScreen.kt
@@ -89,6 +89,7 @@ import app.gamenative.enums.PathType
import app.gamenative.enums.SaveLocation
import app.gamenative.enums.SyncResult
import app.gamenative.service.GameManagerService
+import app.gamenative.service.GOG.GOGService
import app.gamenative.service.SteamService
import app.gamenative.service.SteamService.Companion.getAppDirPath
import app.gamenative.ui.component.LoadingScreen
@@ -209,10 +210,14 @@ fun AppScreen(
DisposableEffect(downloadInfo) {
val onDownloadProgress: (Float) -> Unit = {
if (it >= 1f) {
+ // Download completed - update markers
+ val appDirPath = GameManagerService.getAppDirPath(libraryItem.appId)
+ MarkerUtils.removeMarker(appDirPath, Marker.DOWNLOAD_IN_PROGRESS_MARKER)
+ MarkerUtils.addMarker(appDirPath, Marker.DOWNLOAD_COMPLETE_MARKER)
+
isInstalled = GameManagerService.isGameInstalled(context, libraryItem)
downloadInfo = null
isInstalled = true
- MarkerUtils.addMarker(GameManagerService.getAppDirPath(libraryItem.appId), Marker.DOWNLOAD_COMPLETE_MARKER)
}
downloadProgress = it
}
@@ -331,7 +336,12 @@ fun AppScreen(
"game_name" to libraryItem.name,
),
)
- downloadInfo?.cancel()
+ // Cancel the download properly based on game source
+ if (libraryItem.gameSource == GameSource.GOG) {
+ GOGService.cancelDownload(libraryItem.appId)
+ } else {
+ downloadInfo?.cancel()
+ }
GameManagerService.deleteGame(context, libraryItem)
downloadInfo = null
downloadProgress = 0f
@@ -359,7 +369,15 @@ fun AppScreen(
)
CoroutineScope(Dispatchers.IO).launch {
downloadProgress = 0f
- downloadInfo = GameManagerService.downloadGame(context, libraryItem)
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ // Download failed - show error message
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
msgDialogState = MessageDialogState(false)
}
}
@@ -462,7 +480,14 @@ fun AppScreen(
} else if (GameManagerService.hasPartialDownload(libraryItem)) {
// Resume incomplete download
CoroutineScope(Dispatchers.IO).launch {
- downloadInfo = GameManagerService.downloadGame(context, libraryItem)
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
}
} else if (!isInstalled) {
permissionLauncher.launch(
@@ -484,10 +509,22 @@ fun AppScreen(
},
onPauseResumeClick = {
if (isDownloading()) {
- downloadInfo?.cancel()
+ // Cancel the download properly based on game source
+ if (libraryItem.gameSource == GameSource.GOG) {
+ GOGService.cancelDownload(libraryItem.appId)
+ } else {
+ downloadInfo?.cancel()
+ }
downloadInfo = null
} else {
- downloadInfo = GameManagerService.downloadGame(context, libraryItem)
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
}
},
onDeleteDownloadClick = {
@@ -501,7 +538,16 @@ fun AppScreen(
)
},
onUpdateClick = {
- CoroutineScope(Dispatchers.IO).launch { downloadInfo = GameManagerService.downloadGame(context, libraryItem) }
+ CoroutineScope(Dispatchers.IO).launch {
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
+ }
},
onBack = onBack,
optionsMenu = arrayOf(
@@ -581,7 +627,14 @@ fun AppScreen(
AppOptionMenuType.VerifyFiles,
onClick = {
CoroutineScope(Dispatchers.IO).launch {
- downloadInfo = GameManagerService.downloadGame(context, libraryItem)
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
}
},
),
@@ -589,7 +642,14 @@ fun AppScreen(
AppOptionMenuType.Update,
onClick = {
CoroutineScope(Dispatchers.IO).launch {
- downloadInfo = GameManagerService.downloadGame(context, libraryItem)
+ val result = GameManagerService.downloadGameWithResult(context, libraryItem)
+ if (result.isSuccess) {
+ downloadInfo = result.getOrNull()
+ } else {
+ CoroutineScope(Dispatchers.Main).launch {
+ Toast.makeText(context, result.exceptionOrNull()?.message ?: "Download failed", Toast.LENGTH_LONG).show()
+ }
+ }
}
},
),
@@ -1267,8 +1327,16 @@ private fun AppScreenContent(
SkeletonText(lines = 1, lineHeight = 20)
} else {
if (!isInstalled) {
+ // Use remember and LaunchedEffect to handle async size loading
+ var downloadSize by remember(libraryItem.gameId) { mutableStateOf("Loading...") }
+
+ LaunchedEffect(libraryItem.gameId) {
+ // Now properly async - no more polling needed!
+ downloadSize = GameManagerService.getDownloadSize(libraryItem)
+ }
+
Text(
- text = GameManagerService.getDownloadSize(libraryItem),
+ text = downloadSize,
style = MaterialTheme.typography.bodyLarge.copy(fontWeight = FontWeight.SemiBold),
)
} else {
diff --git a/app/src/main/java/app/gamenative/ui/screen/library/components/LibraryBottomSheet.kt b/app/src/main/java/app/gamenative/ui/screen/library/components/LibraryBottomSheet.kt
index 87f10827a..afb1f5422 100644
--- a/app/src/main/java/app/gamenative/ui/screen/library/components/LibraryBottomSheet.kt
+++ b/app/src/main/java/app/gamenative/ui/screen/library/components/LibraryBottomSheet.kt
@@ -39,8 +39,8 @@ fun LibraryBottomSheet(
verticalArrangement = Arrangement.spacedBy(12.dp)
) {
AppFilter.entries.forEach { appFilter ->
- // TODO properly fix this (and the one below)
- if (appFilter.code !in listOf(0x01, 0x20)) {
+ // App Type filters: exclude status and platform filters
+ if (appFilter.code !in listOf(0x01, 0x20, 0x40, 0x80)) {
FlowFilterChip(
onClick = { onFilterChanged(appFilter) },
label = { Text(text = appFilter.displayText) },
@@ -68,6 +68,23 @@ fun LibraryBottomSheet(
}
}
+ Spacer(modifier = Modifier.height(16.dp))
+
+ Text(text = "Platform", style = MaterialTheme.typography.titleLarge)
+ Spacer(modifier = Modifier.height(8.dp))
+ FlowRow {
+ AppFilter.entries.forEach { appFilter ->
+ if (appFilter.code in listOf(0x40, 0x80)) { // Steam and GOG
+ FlowFilterChip(
+ onClick = { onFilterChanged(appFilter) },
+ label = { Text(text = appFilter.displayText) },
+ selected = selectedFilters.contains(appFilter),
+ leadingIcon = { Icon(imageVector = appFilter.icon, contentDescription = null) },
+ )
+ }
+ }
+ }
+
Spacer(modifier = Modifier.height(16.dp)) // A little extra padding.
}
}
diff --git a/app/src/main/python/gogdl/__init__.py b/app/src/main/python/gogdl/__init__.py
new file mode 100644
index 000000000..89b905c65
--- /dev/null
+++ b/app/src/main/python/gogdl/__init__.py
@@ -0,0 +1,6 @@
+"""
+Android-compatible GOGDL implementation
+Modified from heroic-gogdl for Android/Chaquopy compatibility
+"""
+
+version = "1.1.2-post1"
diff --git a/app/src/main/python/gogdl/api.py b/app/src/main/python/gogdl/api.py
new file mode 100644
index 000000000..d45413b9f
--- /dev/null
+++ b/app/src/main/python/gogdl/api.py
@@ -0,0 +1,118 @@
+import logging
+import time
+import requests
+import json
+from multiprocessing import cpu_count
+from gogdl.dl import dl_utils
+from gogdl import constants
+
+
+class ApiHandler:
+ def __init__(self, auth_manager):
+ self.auth_manager = auth_manager
+ self.logger = logging.getLogger("API")
+ self.session = requests.Session()
+ adapter = requests.adapters.HTTPAdapter(pool_maxsize=cpu_count())
+ self.session.mount("https://", adapter)
+ self.session.headers = {
+ 'User-Agent': 'gogdl/1.0.0 (Android GameNative)'
+ }
+ credentials = self.auth_manager.get_credentials()
+ if credentials:
+ token = credentials["access_token"]
+ self.session.headers["Authorization"] = f"Bearer {token}"
+ self.owned = []
+
+ self.endpoints = dict() # Map of secure link endpoints
+ self.working_on_ids = list() # Product ids whose secure link request is still in flight
+
+ def get_item_data(self, id, expanded=None):
+ if expanded is None:
+ expanded = []
+ self.logger.info(f"Getting info from products endpoint for id: {id}")
+ url = f'{constants.GOG_API}/products/{id}'
+ expanded_arg = '?expand='
+ if len(expanded) > 0:
+ expanded_arg += ','.join(expanded)
+ url += expanded_arg
+ response = self.session.get(url)
+ self.logger.debug(url)
+ if response.ok:
+ return response.json()
+ else:
+ self.logger.error(f"Request failed {response}")
+
+ def get_game_details(self, id):
+ url = f'{constants.GOG_EMBED}/account/gameDetails/{id}.json'
+ response = self.session.get(url)
+ if response.ok:
+ return response.json()
+ else:
+ self.logger.error(f"Request failed {response}")
+
+ def get_user_data(self):
+ url = f'{constants.GOG_API}/user/data/games'
+ response = self.session.get(url)
+ if response.ok:
+ return response.json()
+ else:
+ self.logger.error(f"Request failed {response}")
+
+ def get_builds(self, product_id, platform):
+ url = f'{constants.GOG_CONTENT_SYSTEM}/products/{product_id}/os/{platform}/builds?generation=2'
+ response = self.session.get(url)
+ if response.ok:
+ return response.json()
+ else:
+ self.logger.error(f"Request failed {response}")
+
+ def get_manifest(self, manifest_id, product_id):
+ url = f'{constants.GOG_CONTENT_SYSTEM}/products/{product_id}/os/windows/builds/{manifest_id}'
+ response = self.session.get(url)
+ if response.ok:
+ return response.json()
+ else:
+ self.logger.error(f"Request failed {response}")
+
+ def get_authenticated_request(self, url):
+ """Make an authenticated request with proper headers"""
+ return self.session.get(url)
+
+
+ def get_dependencies_repo(self, depot_version=2):
+ self.logger.info("Getting Dependencies repository")
+ url = constants.DEPENDENCIES_URL if depot_version == 2 else constants.DEPENDENCIES_V1_URL
+ response = self.session.get(url)
+ if not response.ok:
+ return None
+
+ json_data = json.loads(response.content)
+ return json_data
+
+ def get_secure_link(self, product_id, path="", generation=2, root=None):
+ """Get secure download links from GOG API"""
+ url = ""
+ if generation == 2:
+ url = f"{constants.GOG_CONTENT_SYSTEM}/products/{product_id}/secure_link?_version=2&generation=2&path={path}"
+ elif generation == 1:
+ url = f"{constants.GOG_CONTENT_SYSTEM}/products/{product_id}/secure_link?_version=2&type=depot&path={path}"
+
+ if root:
+ url += f"&root={root}"
+
+ try:
+ response = self.get_authenticated_request(url)
+
+ if response.status_code != 200:
+ self.logger.warning(f"Invalid secure link response: {response.status_code}")
+ time.sleep(0.2)
+ return self.get_secure_link(product_id, path, generation, root)
+
+ js = response.json()
+ return js.get('urls', [])
+
+ except Exception as e:
+ self.logger.error(f"Failed to get secure link: {e}")
+ time.sleep(0.2)
+ return self.get_secure_link(product_id, path, generation, root)
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/args.py b/app/src/main/python/gogdl/args.py
new file mode 100644
index 000000000..dca4cf519
--- /dev/null
+++ b/app/src/main/python/gogdl/args.py
@@ -0,0 +1,85 @@
+"""
+Android-compatible argument parser for GOGDL
+"""
+
+import argparse
+from gogdl import constants
+
+def init_parser():
+ """Initialize argument parser with Android-compatible defaults"""
+
+ parser = argparse.ArgumentParser(
+ description='Android-compatible GOG downloader',
+ formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ parser.add_argument(
+ '--auth-config-path',
+ type=str,
+ default=f"{constants.ANDROID_DATA_DIR}/gog_auth.json",
+ help='Path to authentication config file'
+ )
+
+ parser.add_argument(
+ '--display-version',
+ action='store_true',
+ help='Display version information'
+ )
+
+ subparsers = parser.add_subparsers(dest='command', help='Available commands')
+
+ # Auth command
+ auth_parser = subparsers.add_parser('auth', help='Authenticate with GOG or get existing credentials')
+ auth_parser.add_argument('--code', type=str, help='Authorization code from GOG (optional - if not provided, returns existing credentials)')
+
+ # Download command
+ download_parser = subparsers.add_parser('download', help='Download a game')
+ download_parser.add_argument('id', type=str, help='Game ID to download')
+ download_parser.add_argument('--path', type=str, default=constants.ANDROID_GAMES_DIR, help='Download path')
+ download_parser.add_argument('--platform', type=str, default='windows', choices=['windows', 'linux'], help='Platform')
+ download_parser.add_argument('--branch', type=str, help='Game branch to download')
+ download_parser.add_argument('--skip-dlcs', dest='dlcs', action='store_false', help='Skip DLC downloads')
+ download_parser.add_argument('--with-dlcs', dest='dlcs', action='store_true', help='Download DLCs')
+ download_parser.add_argument('--dlcs', dest='dlcs_list', default=[], help='List of dlc ids to download (separated by comma)')
+ download_parser.add_argument('--dlc-only', dest='dlc_only', action='store_true', help='Download only DLC')
+
+ download_parser.add_argument('--lang', type=str, default='en-US', help='Language for the download')
+ download_parser.add_argument('--max-workers', dest='workers_count', type=int, default=2, help='Number of download workers')
+ download_parser.add_argument('--support', dest='support_path', type=str, help='Support files path')
+ download_parser.add_argument('--password', dest='password', help='Password to access other branches')
+ download_parser.add_argument('--force-gen', choices=['1', '2'], dest='force_generation', help='Force specific manifest generation (FOR DEBUGGING)')
+ download_parser.add_argument('--build', '-b', dest='build', help='Specify buildId')
+
+ # Info command (same as heroic-gogdl calculate_size_parser)
+ info_parser = subparsers.add_parser('info', help='Calculates estimated download size and list of DLCs')
+ info_parser.add_argument('--with-dlcs', dest='dlcs', action='store_true', help='Should download all dlcs')
+ info_parser.add_argument('--skip-dlcs', dest='dlcs', action='store_false', help='Should skip all dlcs')
+ info_parser.add_argument('--dlcs', dest='dlcs_list', help='Comma separated list of dlc ids to download')
+ info_parser.add_argument('--dlc-only', dest='dlc_only', action='store_true', help='Download only DLC')
+ info_parser.add_argument('id', help='Game ID')
+ info_parser.add_argument('--platform', '--os', dest='platform', help='Target operating system', choices=['windows', 'linux'], default='windows')
+ info_parser.add_argument('--build', '-b', dest='build', help='Specify buildId')
+ info_parser.add_argument('--branch', dest='branch', help='Choose build branch to use')
+ info_parser.add_argument('--password', dest='password', help='Password to access other branches')
+ info_parser.add_argument('--force-gen', choices=['1', '2'], dest='force_generation', help='Force specific manifest generation (FOR DEBUGGING)')
+ info_parser.add_argument('--lang', '-l', dest='lang', help='Specify game language', default='en-US')
+ info_parser.add_argument('--max-workers', dest='workers_count', type=int, default=2, help='Number of download workers')
+
+ # Repair command
+ repair_parser = subparsers.add_parser('repair', help='Repair/verify game files')
+ repair_parser.add_argument('id', type=str, help='Game ID to repair')
+ repair_parser.add_argument('--path', type=str, default=constants.ANDROID_GAMES_DIR, help='Game path')
+ repair_parser.add_argument('--platform', type=str, default='windows', choices=['windows', 'linux'], help='Platform')
+ repair_parser.add_argument('--password', dest='password', help='Password to access other branches')
+ repair_parser.add_argument('--force-gen', choices=['1', '2'], dest='force_generation', help='Force specific manifest generation (FOR DEBUGGING)')
+ repair_parser.add_argument('--build', '-b', dest='build', help='Specify buildId')
+ repair_parser.add_argument('--branch', dest='branch', help='Choose build branch to use')
+
+ # Save sync command
+ save_parser = subparsers.add_parser('save-sync', help='Sync game saves')
+ save_parser.add_argument('path', help='Path to sync files')
+ save_parser.add_argument('--dirname', help='Cloud save directory name')
+ save_parser.add_argument('--timestamp', type=float, default=0.0, help='Last sync timestamp')
+ save_parser.add_argument('--prefered-action', choices=['upload', 'download', 'none'], help='Preferred sync action')
+
+ return parser.parse_known_args()
diff --git a/app/src/main/python/gogdl/auth.py b/app/src/main/python/gogdl/auth.py
new file mode 100644
index 000000000..9eda306fd
--- /dev/null
+++ b/app/src/main/python/gogdl/auth.py
@@ -0,0 +1,133 @@
+"""
+Android-compatible authentication module
+Based on original auth.py with Android compatibility
+"""
+
+import json
+import os
+import logging
+import requests
+import time
+from typing import Optional, Dict, Any
+
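+# Same OAuth client credentials as GOGOAuthActivity (see its TODO about replacing Heroic's client id)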
+CLIENT_ID = "46899977096215655"
+CLIENT_SECRET = "9d85c43b1482497dbbce61f6e4aa173a433796eeae2ca8c5f6129f2dc4de46d9"
+
+class AuthorizationManager:
+ """Android-compatible authorization manager with token refresh"""
+
+ def __init__(self, config_path: str):
+ self.config_path = config_path
+ self.logger = logging.getLogger("AUTH")
+ self.credentials_data = {}
+ self._read_config()
+
+ def _read_config(self):
+ """Read credentials from config file"""
+ if os.path.exists(self.config_path):
+ try:
+ with open(self.config_path, "r") as f:
+ self.credentials_data = json.load(f)
+ except Exception as e:
+ self.logger.error(f"Failed to read config: {e}")
+ self.credentials_data = {}
+
+ def _write_config(self):
+ """Write credentials to config file"""
+ try:
+ os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
+ with open(self.config_path, "w") as f:
+ json.dump(self.credentials_data, f, indent=2)
+ except Exception as e:
+ self.logger.error(f"Failed to write config: {e}")
+
+ def get_credentials(self, client_id=None, client_secret=None):
+ """
+ Reads data from config and returns it with automatic refresh if expired
+ :param client_id: GOG client ID
+ :param client_secret: GOG client secret
+ :return: dict with credentials or None if not present
+ """
+ if not client_id:
+ client_id = CLIENT_ID
+ if not client_secret:
+ client_secret = CLIENT_SECRET
+
+ credentials = self.credentials_data.get(client_id)
+ if not credentials:
+ return None
+
+ # Check if credentials are expired and refresh if needed
+ if self.is_credential_expired(client_id):
+ if self.refresh_credentials(client_id, client_secret):
+ credentials = self.credentials_data.get(client_id)
+ else:
+ return None
+
+ return credentials
+
+ def is_credential_expired(self, client_id=None) -> bool:
+ """
+ Checks if provided client_id credential is expired
+ :param client_id: GOG client ID
+ :return: whether credentials are expired
+ """
+ if not client_id:
+ client_id = CLIENT_ID
+ credentials = self.credentials_data.get(client_id)
+
+ if not credentials:
+ return True
+
+ # If no loginTime or expires_in, assume expired
+ if "loginTime" not in credentials or "expires_in" not in credentials:
+ return True
+
+ return time.time() >= credentials["loginTime"] + credentials["expires_in"]
+
+ def refresh_credentials(self, client_id=None, client_secret=None) -> bool:
+ """
+ Refreshes credentials and saves them to config
+ :param client_id: GOG client ID
+ :param client_secret: GOG client secret
+ :return: bool if operation was success
+ """
+ if not client_id:
+ client_id = CLIENT_ID
+ if not client_secret:
+ client_secret = CLIENT_SECRET
+
+ credentials = self.credentials_data.get(client_id)
+ if not credentials or "refresh_token" not in credentials:
+ self.logger.error("No refresh token available")
+ return False
+
+ refresh_token = credentials["refresh_token"]
+ url = f"https://auth.gog.com/token?client_id={client_id}&client_secret={client_secret}&grant_type=refresh_token&refresh_token={refresh_token}"
+
+ try:
+ response = requests.get(url, timeout=10)
+ except (requests.ConnectionError, requests.Timeout):
+ self.logger.error("Failed to refresh credentials")
+ return False
+
+ if not response.ok:
+ self.logger.error(f"Failed to refresh credentials: HTTP {response.status_code}")
+ return False
+
+ data = response.json()
+ data["loginTime"] = time.time()
+ self.credentials_data.update({client_id: data})
+ self._write_config()
+ return True
+
+ def get_access_token(self) -> Optional[str]:
+ """Get access token from auth config"""
+ credentials = self.get_credentials()
+ if credentials and 'access_token' in credentials:
+ return credentials['access_token']
+ return None
+
+ def is_authenticated(self) -> bool:
+ """Check if user is authenticated"""
+ return self.get_access_token() is not None
diff --git a/app/src/main/python/gogdl/cli.py b/app/src/main/python/gogdl/cli.py
new file mode 100644
index 000000000..63cfc4d55
--- /dev/null
+++ b/app/src/main/python/gogdl/cli.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+"""
+Android-compatible GOGDL CLI module
+Removes multiprocessing and other Android-incompatible features
+"""
+
+import gogdl.args as args
+from gogdl.dl.managers import manager
+import gogdl.api as api
+import gogdl.auth as auth
+from gogdl import version as gogdl_version
+import json
+import logging
+
+
+def display_version():
+ print(f"{gogdl_version}")
+
+
+def handle_auth(arguments, api_handler):
+ """Handle GOG authentication - exchange authorization code for access token or get existing credentials"""
+ logger = logging.getLogger("GOGDL-AUTH")
+
+ try:
+ import requests
+ import os
+ import time
+
+ # GOG OAuth constants
+ GOG_CLIENT_ID = "46899977096215655"
+ GOG_CLIENT_SECRET = "9d85c43b1482497dbbce61f6e4aa173a433796eeae2ca8c5f6129f2dc4de46d9"
+ GOG_TOKEN_URL = "https://auth.gog.com/token"
+ GOG_USER_URL = "https://embed.gog.com/userData.json"
+
+ # Initialize authorization manager
+ auth_manager = api_handler.auth_manager
+
+ if arguments.code:
+ # Exchange authorization code for access token
+ logger.info("Exchanging authorization code for access token...")
+
+ token_data = {
+ "client_id": GOG_CLIENT_ID,
+ "client_secret": GOG_CLIENT_SECRET,
+ "grant_type": "authorization_code",
+ "code": arguments.code,
+ "redirect_uri": "https://embed.gog.com/on_login_success?origin=client"
+ }
+
+ response = requests.post(GOG_TOKEN_URL, data=token_data)
+
+ if response.status_code != 200:
+ error_msg = f"Token exchange failed: HTTP {response.status_code} - {response.text}"
+ logger.error(error_msg)
+ print(json.dumps({"error": True, "message": error_msg}))
+ return
+
+ token_response = response.json()
+ access_token = token_response.get("access_token")
+ refresh_token = token_response.get("refresh_token")
+
+ if not access_token:
+ error_msg = "No access token in response"
+ logger.error(error_msg)
+ print(json.dumps({"error": True, "message": error_msg}))
+ return
+
+ # Get user information
+ logger.info("Getting user information...")
+ user_response = requests.get(
+ GOG_USER_URL,
+ headers={"Authorization": f"Bearer {access_token}"}
+ )
+
+ username = "GOG User"
+ user_id = "unknown"
+
+ if user_response.status_code == 200:
+ user_data = user_response.json()
+ username = user_data.get("username", "GOG User")
+ user_id = str(user_data.get("userId", "unknown"))
+ else:
+ logger.warning(f"Failed to get user info: HTTP {user_response.status_code}")
+
+ # Save credentials with loginTime and expires_in (like original auth.py)
+ auth_data = {
+ GOG_CLIENT_ID: {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "user_id": user_id,
+ "username": username,
+ "loginTime": time.time(),
+ "expires_in": token_response.get("expires_in", 3600)
+ }
+ }
+
+ os.makedirs(os.path.dirname(arguments.auth_config_path), exist_ok=True)
+
+ with open(arguments.auth_config_path, 'w') as f:
+ json.dump(auth_data, f, indent=2)
+
+ logger.info(f"Authentication successful for user: {username}")
+ print(json.dumps(auth_data[GOG_CLIENT_ID]))
+
+ else:
+ # Get existing credentials (like original auth.py get_credentials)
+ logger.info("Getting existing credentials...")
+ credentials = auth_manager.get_credentials()
+
+ if credentials:
+ logger.info(f"Retrieved credentials for user: {credentials.get('username', 'GOG User')}")
+ print(json.dumps(credentials))
+ else:
+ logger.warning("No valid credentials found")
+ print(json.dumps({"error": True, "message": "No valid credentials found"}))
+
+ except Exception as e:
+ logger.error(f"Authentication failed: {e}")
+ print(json.dumps({"error": True, "message": str(e)}))
+ raise
+
+
+def main():
+ arguments, unknown_args = args.init_parser()
+ level = logging.INFO
+ if '-d' in unknown_args or '--debug' in unknown_args:
+ level = logging.DEBUG
+ logging.basicConfig(format="[%(name)s] %(levelname)s: %(message)s", level=level)
+ logger = logging.getLogger("GOGDL-ANDROID")
+ logger.debug(arguments)
+
+ if arguments.display_version:
+ display_version()
+ return
+
+ if not arguments.command:
+ print("No command provided!")
+ return
+
+ # Initialize Android-compatible managers
+ authorization_manager = auth.AuthorizationManager(arguments.auth_config_path)
+ api_handler = api.ApiHandler(authorization_manager)
+
+ switcher = {}
+
+ # Handle authentication command
+ if arguments.command == "auth":
+ switcher["auth"] = lambda: handle_auth(arguments, api_handler)
+
+ # Handle download/info commands
+ if arguments.command in ["download", "repair", "update", "info"]:
+ download_manager = manager.AndroidManager(arguments, unknown_args, api_handler)
+ switcher.update({
+ "download": download_manager.download,
+ "repair": download_manager.download,
+ "update": download_manager.download,
+ "info": lambda: download_manager.calculate_download_size(arguments, unknown_args),
+ })
+
+ # Handle save sync command
+ if arguments.command == "save-sync":
+ import gogdl.saves as saves
+ clouds_storage_manager = saves.CloudStorageManager(api_handler, authorization_manager)
+ switcher["save-sync"] = lambda: clouds_storage_manager.sync(arguments, unknown_args)
+
+ if arguments.command in switcher:
+ try:
+ switcher[arguments.command]()
+ except Exception as e:
+ logger.error(f"Command failed: {e}")
+ raise
+ else:
+ logger.error(f"Unknown command: {arguments.command}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/app/src/main/python/gogdl/constants.py b/app/src/main/python/gogdl/constants.py
new file mode 100644
index 000000000..2e8a41c63
--- /dev/null
+++ b/app/src/main/python/gogdl/constants.py
@@ -0,0 +1,29 @@
+"""
+Android-compatible constants for GOGDL
+"""
+
+import os
+
+# GOG API endpoints (matching original heroic-gogdl)
+GOG_CDN = "https://gog-cdn-fastly.gog.com"
+GOG_CONTENT_SYSTEM = "https://content-system.gog.com"
+GOG_EMBED = "https://embed.gog.com"
+GOG_AUTH = "https://auth.gog.com"
+GOG_API = "https://api.gog.com"
+GOG_CLOUDSTORAGE = "https://cloudstorage.gog.com"
+DEPENDENCIES_URL = "https://content-system.gog.com/dependencies/repository?generation=2"
+DEPENDENCIES_V1_URL = "https://content-system.gog.com/redists/repository"
+
+NON_NATIVE_SEP = "\\" if os.sep == "/" else "/"
+
+# Android-specific paths
+ANDROID_DATA_DIR = "/data/user/0/app.gamenative/files"
+ANDROID_GAMES_DIR = "/data/data/app.gamenative/storage/gog_games"
+CONFIG_DIR = ANDROID_DATA_DIR
+MANIFESTS_DIR = os.path.join(CONFIG_DIR, "manifests")
+
+# Download settings optimized for Android
+DEFAULT_CHUNK_SIZE = 1024 * 1024 # 1MB chunks for mobile
+MAX_CONCURRENT_DOWNLOADS = 2 # Conservative for mobile
+CONNECTION_TIMEOUT = 30 # 30 second timeout
+READ_TIMEOUT = 60 # 1 minute read timeout
diff --git a/app/src/main/python/gogdl/dl/__init__.py b/app/src/main/python/gogdl/dl/__init__.py
new file mode 100644
index 000000000..0c3e11496
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/__init__.py
@@ -0,0 +1,3 @@
+"""
+Android-compatible download module
+"""
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/dl/dl_utils.py b/app/src/main/python/gogdl/dl/dl_utils.py
new file mode 100644
index 000000000..1f332a1dd
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/dl_utils.py
@@ -0,0 +1,184 @@
+import json
+import zlib
+import os
+import gogdl.constants as constants
+from gogdl.dl.objects import v1, v2
+import shutil
+import time
+import requests
+from sys import exit, platform
+import logging
+
+PATH_SEPARATOR = os.sep
+TIMEOUT = 10
+
+
+def get_json(api_handler, url):
+ logger = logging.getLogger("DL_UTILS")
+ logger.info(f"Fetching JSON from: {url}")
+ x = api_handler.session.get(url, headers={"Accept": "application/json"})
+ logger.info(f"Response status: {x.status_code}")
+ if not x.ok:
+ logger.error(f"Request failed: {x.status_code} - {x.text}")
+ return
+ logger.info("JSON fetch successful")
+ return x.json()
+
+
+def get_zlib_encoded(api_handler, url):
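+ # Manifests may be served zlib-compressed; fall back to plain JSON when decompression fails, retrying on network errors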
+ retries = 5
+ while retries > 0:
+ try:
+ x = api_handler.session.get(url, timeout=TIMEOUT)
+ if not x.ok:
+ return None, None
+ try:
+ decompressed = json.loads(zlib.decompress(x.content, 15))
+ except zlib.error:
+ return x.json(), x.headers
+ return decompressed, x.headers
+ except Exception:
+ time.sleep(2)
+ retries-=1
+ return None, None
+
+
+def prepare_location(path, logger=None):
+ os.makedirs(path, exist_ok=True)
+ if logger:
+ logger.debug(f"Created directory {path}")
+
+
+# V1 Compatible
+def galaxy_path(manifest: str):
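+ # Expand a bare manifest hash "abcdef..." into the CDN layout "ab/cd/abcdef..."; no-op if it already contains "/"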
+ galaxy_path = manifest
+ if galaxy_path.find("/") == -1:
+ galaxy_path = manifest[0:2] + "/" + manifest[2:4] + "/" + galaxy_path
+ return galaxy_path
+
+
+def get_secure_link(api_handler, path, gameId, generation=2, logger=None, root=None):
+ url = ""
+ if generation == 2:
+ url = f"{constants.GOG_CONTENT_SYSTEM}/products/{gameId}/secure_link?_version=2&generation=2&path={path}"
+ elif generation == 1:
+ url = f"{constants.GOG_CONTENT_SYSTEM}/products/{gameId}/secure_link?_version=2&type=depot&path={path}"
+ if root:
+ url += f"&root={root}"
+
+ try:
+ r = requests.get(url, headers=api_handler.session.headers, timeout=TIMEOUT)
+ except BaseException as exception:
+ if logger:
+ logger.info(exception)
+ time.sleep(0.2)
+ return get_secure_link(api_handler, path, gameId, generation, logger, root)
+
+ if r.status_code != 200:
+ if logger:
+ logger.info("invalid secure link response")
+ time.sleep(0.2)
+ return get_secure_link(api_handler, path, gameId, generation, logger, root)
+
+ js = r.json()
+
+ return js['urls']
+
+def get_dependency_link(api_handler):
+ data = get_json(
+ api_handler,
+ f"{constants.GOG_CONTENT_SYSTEM}/open_link?generation=2&_version=2&path=/dependencies/store/",
+ )
+ if not data:
+ return None
+ return data["urls"]
+
+
+def merge_url_with_params(url, parameters):
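+ # Substitute "{key}" placeholders in a secure-link URL template with the corresponding parameter values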
+ for key in parameters.keys():
+ url = url.replace("{" + key + "}", str(parameters[key]))
+ if not url:
+ print(f"Error ocurred getting a secure link: {url}")
+ return url
+
+
+def parent_dir(path: str):
+ return os.path.split(path)[0]
+
+
+def calculate_sum(path, function, read_speed_function=None):
+ with open(path, "rb") as f:
+ calculate = function()
+ while True:
+ chunk = f.read(16 * 1024)
+ if not chunk:
+ break
+ if read_speed_function:
+ read_speed_function(len(chunk))
+ calculate.update(chunk)
+
+ return calculate.hexdigest()
+
+
+def get_readable_size(size):
+ power = 2 ** 10
+ n = 0
+ power_labels = {0: "", 1: "K", 2: "M", 3: "G"}
+ while size > power:
+ size /= power
+ n += 1
+ return size, power_labels[n] + "B"
+
+
+def check_free_space(size: int, path: str):
+ if not os.path.exists(path):
+ os.makedirs(path, exist_ok=True)
+ _, _, available_space = shutil.disk_usage(path)
+
+ return size < available_space
+
+
+def get_range_header(offset, size):
+ from_value = offset
+ to_value = (int(offset) + int(size)) - 1
+ return f"bytes={from_value}-{to_value}"
+
+# Creates appropriate Manifest class based on provided meta from json
+def create_manifest_class(meta: dict, api_handler):
+ version = meta.get("version")
+ if version == 1:
+ return v1.Manifest.from_json(meta, api_handler)
+ else:
+ return v2.Manifest.from_json(meta, api_handler)
+
+def get_case_insensitive_name(path):
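+ # Resolve a path case-insensitively: walk down from the deepest existing ancestor, matching each remaining component against on-disk names regardless of case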
+ if platform == "win32" or os.path.exists(path):
+ return path
+ root = path
+ # Find existing directory
+ while not os.path.exists(root):
+ root = os.path.split(root)[0]
+
+ if not root[len(root) - 1] in ["/", "\\"]:
+ root = root + os.sep
+ # Separate unknown path from existing one
+ s_working_dir = path.replace(root, "").split(os.sep)
+ paths_to_find = len(s_working_dir)
+ paths_found = 0
+ for directory in s_working_dir:
+ if not os.path.exists(root):
+ break
+ dir_list = os.listdir(root)
+ found = False
+ for existing_dir in dir_list:
+ if existing_dir.lower() == directory.lower():
+ root = os.path.join(root, existing_dir)
+ paths_found += 1
+ found = True
+ if not found:
+ root = os.path.join(root, directory)
+ paths_found += 1
+
+ if paths_to_find != paths_found:
+ root = os.path.join(root, os.sep.join(s_working_dir[paths_found:]))
+ return root
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/dl/managers/__init__.py b/app/src/main/python/gogdl/dl/managers/__init__.py
new file mode 100644
index 000000000..58e7b4716
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/__init__.py
@@ -0,0 +1,4 @@
+"""
+Android-compatible download managers
+"""
+
diff --git a/app/src/main/python/gogdl/dl/managers/dependencies.py b/app/src/main/python/gogdl/dl/managers/dependencies.py
new file mode 100644
index 000000000..8727f7101
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/dependencies.py
@@ -0,0 +1,166 @@
+from sys import exit
+import logging
+import os
+import json
+from typing import Optional
+from gogdl.dl import dl_utils
+import gogdl.constants as constants
+from gogdl.dl.managers.task_executor import ExecutingManager
+from gogdl.dl.objects import v2
+from gogdl.dl.objects.generic import BaseDiff
+
+
+def get_depot_list(manifest, product_id=None):
+ download_list = list()
+ for item in manifest["depot"]["items"]:
+ if item["type"] == "DepotFile":
+ download_list.append(v2.DepotFile(item, product_id))
+ return download_list
+
+
+# V2 dependencies also work for V1 games, so we use them for both generations.
+class DependenciesManager:
+ def __init__(
+ self, ids, path, workers_count, api_handler, print_manifest=False, download_game_deps_only=False
+ ):
+ self.api = api_handler
+
+ self.logger = logging.getLogger("REDIST")
+
+ self.path = path
+ self.installed_manifest = os.path.join(self.path, '.gogdl-redist-manifest')
+ self.workers_count = int(workers_count)
+ self.build = self.api.get_dependencies_repo()
+ self.repository = dl_utils.get_zlib_encoded(self.api, self.build['repository_manifest'])[0] or {}
+ # Put version for easier serialization
+ self.repository['build_id'] = self.build['build_id']
+
+ self.ids = ids
+ self.download_game_deps_only = download_game_deps_only # Basically skip all redist with path starting with __redist
+ if self.repository and print_manifest:
+ print(json.dumps(self.repository))
+
+ def get_files_for_depot_manifest(self, manifest):
+ url = f'{constants.GOG_CDN}/content-system/v2/dependencies/meta/{dl_utils.galaxy_path(manifest)}'
+ manifest = dl_utils.get_zlib_encoded(self.api, url)[0]
+
+ return get_depot_list(manifest, 'redist')
+
+
+ def get(self, return_files=False):
+ old_depots = []
+ new_depots = []
+ if not self.ids:
+ return []
+ installed = set()
+
+ # This will always be None for redist written in the game dir
+ existing_manifest = None
+ if os.path.exists(self.installed_manifest):
+ try:
+ with open(self.installed_manifest, 'r') as f:
+ existing_manifest = json.load(f)
+ except Exception:
+ existing_manifest = None
+ pass
+ else:
+ if 'depots' in existing_manifest and 'build_id' in existing_manifest:
+ already_installed = existing_manifest.get('HGLInstalled') or []
+ for depot in existing_manifest["depots"]:
+ if depot["dependencyId"] in already_installed:
+ old_depots.append(depot)
+
+ for depot in self.repository["depots"]:
+ if depot["dependencyId"] in self.ids:
+ # By default we only download redistributables whose path begins with __redist
+ # (the game's own installation step handles the ones shipped inside the game dir)
+ should_download = depot["executable"]["path"].startswith("__redist")
+
+ # If we want to download redist located in game dir we flip the boolean
+ if self.download_game_deps_only:
+ should_download = not should_download
+
+ if should_download:
+ installed.add(depot['dependencyId'])
+ new_depots.append(depot)
+
+ new_files = []
+ old_files = []
+
+ # Collect files for each redistributable
+ for depot in new_depots:
+ new_files += self.get_files_for_depot_manifest(depot["manifest"])
+
+ for depot in old_depots:
+ old_files += self.get_files_for_depot_manifest(depot["manifest"])
+
+ if return_files:
+ return new_files
+
+
+ diff = DependenciesDiff.compare(new_files, old_files)
+
+ if not len(diff.changed) and not len(diff.deleted) and not len(diff.new):
+ self.logger.info("Nothing to do")
+ self._write_manifest(installed)
+ return
+
+ secure_link = dl_utils.get_dependency_link(self.api) # This should never expire
+ executor = ExecutingManager(self.api, self.workers_count, self.path, os.path.join(self.path, 'gog-support'), diff, {'redist': secure_link}, 'gog-redist')
+ success = executor.setup()
+ if not success:
+ print('Unable to proceed, Not enough disk space')
+ exit(2)
+ cancelled = executor.run()
+
+ if cancelled:
+ return
+
+ self._write_manifest(installed)
+
+ def _write_manifest(self, installed: set):
+ repository = self.repository
+ repository['HGLInstalled'] = list(installed)
+ with open(self.installed_manifest, 'w') as f:
+ json.dump(repository, f)
+
+
+class DependenciesDiff(BaseDiff):
+ def __init__(self):
+ super().__init__()
+
+ @classmethod
+ def compare(cls, new_files: list, old_files: Optional[list]):
+ comparison = cls()
+
+ if not old_files:
+ comparison.new = new_files
+ return comparison
+
+ new_files_paths = dict()
+ for file in new_files:
+ new_files_paths.update({file.path.lower(): file})
+
+ old_files_paths = dict()
+ for file in old_files:
+ old_files_paths.update({file.path.lower(): file})
+
+ for old_file in old_files_paths.values():
+ if not new_files_paths.get(old_file.path.lower()):
+ comparison.deleted.append(old_file)
+
+ for new_file in new_files_paths.values():
+ old_file = old_files_paths.get(new_file.path.lower())
+ if not old_file:
+ comparison.new.append(new_file)
+ else:
+ if len(new_file.chunks) == 1 and len(old_file.chunks) == 1:
+ if new_file.chunks[0]["md5"] != old_file.chunks[0]["md5"]:
+ comparison.changed.append(new_file)
+ else:
+ if (new_file.md5 and old_file.md5 and new_file.md5 != old_file.md5) or (new_file.sha256 and old_file.sha256 != new_file.sha256):
+ comparison.changed.append(v2.FileDiff.compare(new_file, old_file))
+ elif len(new_file.chunks) != len(old_file.chunks):
+ comparison.changed.append(v2.FileDiff.compare(new_file, old_file))
+ return comparison
diff --git a/app/src/main/python/gogdl/dl/managers/linux.py b/app/src/main/python/gogdl/dl/managers/linux.py
new file mode 100644
index 000000000..26c97708e
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/linux.py
@@ -0,0 +1,19 @@
+"""
+Android-compatible Linux manager (simplified)
+"""
+
+import logging
+from gogdl.dl.managers.v2 import Manager
+
+class LinuxManager(Manager):
+ """Android-compatible Linux download manager"""
+
+ def __init__(self, arguments, unknown_arguments, api_handler, max_workers=2):
+ super().__init__(arguments, unknown_arguments, api_handler, max_workers)
+ self.logger = logging.getLogger("LinuxManager")
+
+ def download(self):
+ """Download Linux game (uses similar logic to Windows)"""
+ self.logger.info(f"Starting Linux download for game {self.game_id}")
+ # For now, use the same V2 logic but with Linux platform
+ super().download()
diff --git a/app/src/main/python/gogdl/dl/managers/manager.py b/app/src/main/python/gogdl/dl/managers/manager.py
new file mode 100644
index 000000000..f65849799
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/manager.py
@@ -0,0 +1,207 @@
+"""
+Android-compatible download manager
+Replaces multiprocessing with threading for Android compatibility
+"""
+
+from dataclasses import dataclass
+import os
+import logging
+import json
+import threading
+from concurrent.futures import ThreadPoolExecutor
+
+from gogdl import constants
+from gogdl.dl.managers import linux, v1, v2
+
+@dataclass
+class UnsupportedPlatform(Exception):
+ pass
+
+class AndroidManager:
+ """Android-compatible version of GOGDL Manager that uses threading instead of multiprocessing"""
+
+ def __init__(self, arguments, unknown_arguments, api_handler):
+ self.arguments = arguments
+ self.unknown_arguments = unknown_arguments
+ self.api_handler = api_handler
+
+ self.platform = arguments.platform
+ self.should_append_folder_name = self.arguments.command == "download"
+ self.is_verifying = self.arguments.command == "repair"
+ self.game_id = arguments.id
+ self.branch = getattr(arguments, 'branch', None)
+
+ # Use a reasonable number of threads for Android
+ if hasattr(arguments, "workers_count"):
+ self.allowed_threads = min(int(arguments.workers_count), 4) # Limit threads on mobile
+ else:
+ self.allowed_threads = 2 # Conservative default for Android
+
+ self.logger = logging.getLogger("AndroidManager")
+
+ def download(self):
+ """Download game using Android-compatible threading"""
+ try:
+ self.logger.info(f"Starting Android download for game {self.game_id}")
+
+ if self.platform == "linux":
+ # Use Linux manager with threading
+ manager = linux.LinuxManager(
+ self.arguments,
+ self.unknown_arguments,
+ self.api_handler,
+ max_workers=self.allowed_threads
+ )
+ manager.download()
+ return
+
+ # Get builds to determine generation
+ builds = self.get_builds(self.platform)
+ if not builds or len(builds['items']) == 0:
+ raise Exception("No builds found")
+
+ # Select target build (same logic as heroic-gogdl)
+ target_build = builds['items'][0] # Default to first build
+
+ # Check for specific branch
+ for build in builds['items']:
+ if build.get("branch") == self.branch:
+ target_build = build
+ break
+
+ # Check for specific build ID
+ if hasattr(self.arguments, 'build') and self.arguments.build:
+ for build in builds['items']:
+ if build.get("build_id") == self.arguments.build:
+ target_build = build
+ break
+
+ # Store builds and target_build as instance attributes for V2 Manager
+ self.builds = builds
+ self.target_build = target_build
+
+ generation = target_build.get("generation", 2)
+ self.logger.info(f"Using build {target_build.get('build_id', 'unknown')} for download (generation: {generation})")
+
+ # Use the correct manager based on generation - same as heroic-gogdl
+ if generation == 1:
+ self.logger.info("Using V1Manager for generation 1 game")
+ manager = v1.Manager(self) # Pass self like V2 does
+ elif generation == 2:
+ self.logger.info("Using V2Manager for generation 2 game")
+ manager = v2.Manager(self)
+ else:
+ raise Exception(f"Unsupported generation: {generation}")
+
+ manager.download()
+
+ except Exception as e:
+ self.logger.error(f"Download failed: {e}")
+ raise
+
+ def setup_download_manager(self):
+        # TODO: If a content system for Linux ever appears, remove this if statement,
+        # but keep the one below so we have some sort of fallback
+        # in case not all games are available in the content system
+ if self.platform == "linux":
+ self.logger.info(
+ "Platform is Linux, redirecting download to Linux Native installer manager"
+ )
+
+            self.download_manager = linux.LinuxManager(self)
+
+ return
+
+ try:
+ self.builds = self.get_builds(self.platform)
+ except UnsupportedPlatform:
+ if self.platform == "linux":
+ self.logger.info(
+ "Platform is Linux, redirecting download to Linux Native installer manager"
+ )
+
+                self.download_manager = linux.LinuxManager(self)
+
+ return
+
+ self.logger.error(f"Game doesn't support content system api, unable to proceed using platform {self.platform}")
+ exit(1)
+
+ # If Linux download ever progresses to this point, then it's time for some good party
+
+ if len(self.builds["items"]) == 0:
+ self.logger.error("No builds found")
+ exit(1)
+ self.target_build = self.builds["items"][0]
+
+ for build in self.builds["items"]:
+ if build["branch"] == None:
+ self.target_build = build
+ break
+
+ for build in self.builds["items"]:
+ if build["branch"] == self.branch:
+ self.target_build = build
+ break
+
+ if self.arguments.build:
+ # Find build
+ for build in self.builds["items"]:
+ if build["build_id"] == self.arguments.build:
+ self.target_build = build
+ break
+ self.logger.debug(f'Found build {self.target_build}')
+
+ generation = self.target_build["generation"]
+
+ if self.is_verifying:
+ manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id)
+ if os.path.exists(manifest_path):
+ with open(manifest_path, 'r') as f:
+ manifest_data = json.load(f)
+ generation = int(manifest_data['version'])
+
+        # This code shouldn't run at all, but it's here just in case GOG decides to return a different generation than the requested one
+        # Of course assuming they will ever change their content system generation (I highly doubt they will)
+        if generation not in [1, 2]:
+            raise Exception("Unsupported depot version, please report this")
+
+ self.logger.info(f"Depot version: {generation}")
+
+ if generation == 1:
+ self.download_manager = v1.Manager(self)
+ elif generation == 2:
+ self.download_manager = v2.Manager(self)
+
+ def calculate_download_size(self, arguments, unknown_arguments):
+ """Calculate download size - same as heroic-gogdl"""
+ try:
+ self.setup_download_manager()
+
+ download_size_response = self.download_manager.get_download_size()
+ download_size_response['builds'] = self.builds
+
+            # Print JSON output like heroic-gogdl does (json is already imported at module level)
+            print(json.dumps(download_size_response))
+
+ except Exception as e:
+ self.logger.error(f"Calculate download size failed: {e}")
+ raise
+
+ def get_builds(self, build_platform):
+ password_arg = getattr(self.arguments, 'password', None)
+ password = '' if not password_arg else '&password=' + password_arg
+ generation = getattr(self.arguments, 'force_generation', None) or "2"
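+        # Query GOG's content system for available builds; generation 2 is requested by default unless force_generation overrides it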
+ response = self.api_handler.session.get(
+ f"{constants.GOG_CONTENT_SYSTEM}/products/{self.game_id}/os/{build_platform}/builds?&generation={generation}{password}"
+ )
+
+ if not response.ok:
+ raise UnsupportedPlatform()
+ data = response.json()
+
+ if data['total_count'] == 0:
+ raise UnsupportedPlatform()
+
+ return data
diff --git a/app/src/main/python/gogdl/dl/managers/task_executor.py b/app/src/main/python/gogdl/dl/managers/task_executor.py
new file mode 100644
index 000000000..3814a7cf6
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/task_executor.py
@@ -0,0 +1,759 @@
+import logging
+import os
+import signal
+import time
+from sys import exit
+from threading import Thread
+from collections import deque, Counter
+from queue import Queue # Use threading.Queue instead of multiprocessing.Queue
+from threading import Condition
+import tempfile
+from typing import Union
+from gogdl.dl import dl_utils
+
+from gogdl.dl.dl_utils import get_readable_size
+from gogdl.dl.progressbar import ProgressBar
+from gogdl.dl.workers import task_executor
+from gogdl.dl.objects import generic, v2, v1, linux
+
+class ExecutingManager:
+ def __init__(self, api_handler, allowed_threads, path, support, diff, secure_links, game_id=None) -> None:
+ self.api_handler = api_handler
+ self.allowed_threads = allowed_threads
+ self.path = path
+ self.resume_file = os.path.join(path, '.gogdl-resume')
+ self.game_id = game_id # Store game_id for cancellation checking
+ self.support = support or os.path.join(path, 'gog-support')
+ self.cache = os.path.join(path, '.gogdl-download-cache')
+ self.diff: generic.BaseDiff = diff
+ self.secure_links = secure_links
+ self.logger = logging.getLogger("TASK_EXEC")
+ self.logger.info(f"ExecutingManager initialized with game_id: {self.game_id}")
+
+ self.download_size = 0
+ self.disk_size = 0
+
+ # Use temporary directory instead of shared memory on Android
+ self.temp_dir = tempfile.mkdtemp(prefix='gogdl_')
+ self.temp_files = deque()
+ self.hash_map = dict()
+ self.v2_chunks_to_download = deque()
+ self.v1_chunks_to_download = deque()
+ self.linux_chunks_to_download = deque()
+ self.tasks = deque()
+ self.active_tasks = 0
+
+ self.processed_items = 0
+ self.items_to_complete = 0
+
+ self.download_workers = list()
+ self.writer_worker = None
+ self.threads = list()
+
+ self.temp_cond = Condition()
+ self.task_cond = Condition()
+
+ self.running = True
+
+ def setup(self):
+ self.logger.debug("Beginning executor manager setup")
+ self.logger.debug("Initializing queues")
+ # Use threading queues instead of multiprocessing
+ self.download_queue = Queue()
+ self.download_res_queue = Queue()
+ self.writer_queue = Queue()
+ self.writer_res_queue = Queue()
+
+ self.download_speed_updates = Queue()
+ self.writer_speed_updates = Queue()
+
+ # Required space for download to succeed
+ required_disk_size_delta = 0
+
+ # This can be either v1 File or v2 DepotFile
+ for f in self.diff.deleted + self.diff.removed_redist:
+ support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.DELETE_FILE | support_flag))
+ if isinstance(f, v1.File):
+ required_disk_size_delta -= f.size
+ elif isinstance(f, v2.DepotFile):
+ required_disk_size_delta -= sum([ch['size'] for ch in f.chunks])
+
+ current_tmp_size = required_disk_size_delta
+
+ shared_chunks_counter = Counter()
+ completed_files = set()
+
+ missing_files = set()
+ mismatched_files = set()
+
+ downloaded_v1 = dict()
+ downloaded_linux = dict()
+ cached = set()
+
+ # Re-use caches
+ if os.path.exists(self.cache):
+ for cache_file in os.listdir(self.cache):
+ cached.add(cache_file)
+
+ self.biggest_chunk = 0
+ # Find biggest chunk to optimize how much memory is 'wasted' per chunk
+ # Also create hashmap for those files
+ for f in self.diff.new + self.diff.changed + self.diff.redist:
+ if isinstance(f, v1.File):
+ self.hash_map.update({f.path.lower(): f.hash})
+
+ elif isinstance(f, linux.LinuxFile):
+ self.hash_map.update({f.path.lower(): f.hash})
+
+ elif isinstance(f, v2.DepotFile):
+ first_chunk_checksum = f.chunks[0]['md5'] if len(f.chunks) else None
+ checksum = f.md5 or f.sha256 or first_chunk_checksum
+ self.hash_map.update({f.path.lower(): checksum})
+ for i, chunk in enumerate(f.chunks):
+ shared_chunks_counter[chunk["compressedMd5"]] += 1
+ if self.biggest_chunk < chunk["size"]:
+ self.biggest_chunk = chunk["size"]
+
+ elif isinstance(f, v2.FileDiff):
+ first_chunk_checksum = f.file.chunks[0]['md5'] if len(f.file.chunks) else None
+ checksum = f.file.md5 or f.file.sha256 or first_chunk_checksum
+ self.hash_map.update({f.file.path.lower(): checksum})
+ for i, chunk in enumerate(f.file.chunks):
+ if chunk.get("old_offset") is None:
+ shared_chunks_counter[chunk["compressedMd5"]] += 1
+ if self.biggest_chunk < chunk["size"]:
+ self.biggest_chunk = chunk["size"]
+
+ elif isinstance(f, v2.FilePatchDiff):
+ first_chunk_checksum = f.new_file.chunks[0]['md5'] if len(f.new_file.chunks) else None
+ checksum = f.new_file.md5 or f.new_file.sha256 or first_chunk_checksum
+ self.hash_map.update({f.new_file.path.lower(): checksum})
+ for chunk in f.chunks:
+ shared_chunks_counter[chunk["compressedMd5"]] += 1
+ if self.biggest_chunk < chunk["size"]:
+ self.biggest_chunk = chunk["size"]
+
+
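+        # No V2 chunks were seen (pure V1/Linux download), so fall back to a fixed 20 MiB chunk size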
+ if not self.biggest_chunk:
+ self.biggest_chunk = 20 * 1024 * 1024
+ else:
+ # Have at least 10 MiB chunk size for V1 downloads
+ self.biggest_chunk = max(self.biggest_chunk, 10 * 1024 * 1024)
+
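+        # Parse the resume file (hash:support:path per line) to skip files that are already complete and unchanged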
+ if os.path.exists(self.resume_file):
+ self.logger.info("Attempting to continue the download")
+ try:
+ missing = 0
+ mismatch = 0
+
+ with open(self.resume_file, 'r') as f:
+ for line in f.readlines():
+ hash, support, file_path = line.strip().split(':')
+
+ if support == 'support':
+ abs_path = os.path.join(self.support, file_path)
+ else:
+ abs_path = os.path.join(self.path, file_path)
+
+ if not os.path.exists(dl_utils.get_case_insensitive_name(abs_path)):
+ missing_files.add(file_path.lower())
+ missing += 1
+ continue
+
+ current_hash = self.hash_map.get(file_path.lower())
+ if current_hash != hash:
+ mismatched_files.add(file_path.lower())
+ mismatch += 1
+ continue
+
+ completed_files.add(file_path.lower())
+                    if missing:
+                        self.logger.warning(f'There are {missing} missing files; they will be re-downloaded')
+                    if mismatch:
+                        self.logger.warning(f'There are {mismatch} files changed since the last download; they will be re-downloaded')
+
+ except Exception as e:
+ self.logger.error(f"Unable to resume download, continuing as normal {e}")
+
+ # Create temp files for chunks instead of using shared memory
+ for i in range(self.allowed_threads * 4): # More temp files than threads
+ temp_file = os.path.join(self.temp_dir, f'chunk_{i}.tmp')
+ self.temp_files.append(temp_file)
+
+ # Create tasks for each chunk
+ for f in self.diff.new + self.diff.changed + self.diff.redist:
+ if isinstance(f, v1.File):
+ support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE
+ if f.size == 0:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE | support_flag))
+ continue
+
+ if f.path.lower() in completed_files:
+ downloaded_v1[f.hash] = f
+ continue
+
+ required_disk_size_delta += f.size
+ # In case of same file we can copy it over
+ if f.hash in downloaded_v1:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.COPY_FILE | support_flag, old_flags=generic.TaskFlag.SUPPORT if 'support' in downloaded_v1[f.hash].flags else generic.TaskFlag.NONE, old_file=downloaded_v1[f.hash].path))
+ if 'executable' in f.flags:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag))
+ continue
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE | support_flag))
+ self.download_size += f.size
+ self.disk_size += f.size
+ size_left = f.size
+ chunk_offset = 0
+ i = 0
+ # Split V1 file by chunks, so we can store it in temp files
+ while size_left:
+ chunk_size = min(self.biggest_chunk, size_left)
+ offset = f.offset + chunk_offset
+
+ task = generic.V1Task(f.product_id, i, offset, chunk_size, f.hash)
+ self.tasks.append(task)
+ self.v1_chunks_to_download.append((f.product_id, task.compressed_md5, offset, chunk_size))
+
+ chunk_offset += chunk_size
+ size_left -= chunk_size
+ i += 1
+
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag))
+ if 'executable' in f.flags:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag))
+ downloaded_v1[f.hash] = f
+
+ elif isinstance(f, linux.LinuxFile):
+ if f.size == 0:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE))
+ continue
+
+ if f.path.lower() in completed_files:
+ downloaded_linux[f.hash] = f
+ continue
+
+ required_disk_size_delta += f.size
+ if f.hash in downloaded_linux:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.COPY_FILE, old_flags=generic.TaskFlag.NONE, old_file=downloaded_linux[f.hash].path))
+ if 'executable' in f.flags:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE))
+ continue
+
+ self.tasks.append(generic.FileTask(f.path+'.tmp', flags=generic.TaskFlag.OPEN_FILE))
+ self.download_size += f.compressed_size
+ self.disk_size += f.size
+ size_left = f.compressed_size
+ chunk_offset = 0
+ i = 0
+                # Split the Linux installer file into chunks so we can store it in temp files
+ while size_left:
+ chunk_size = min(self.biggest_chunk, size_left)
+ offset = f.offset + chunk_offset
+
+ task = generic.V1Task(f.product, i, offset, chunk_size, f.hash)
+ self.tasks.append(task)
+ self.linux_chunks_to_download.append((f.product, task.compressed_md5, offset, chunk_size))
+
+ chunk_offset += chunk_size
+ size_left -= chunk_size
+ i += 1
+
+ self.tasks.append(generic.FileTask(f.path + '.tmp', flags=generic.TaskFlag.CLOSE_FILE))
+ if f.compression:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE))
+ self.tasks.append(generic.ChunkTask(f.product, 0, f.hash+"_dec", f.hash+"_dec", f.compressed_size, f.compressed_size, True, False, 0, old_flags=generic.TaskFlag.ZIP_DEC, old_file=f.path+'.tmp'))
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE))
+ self.tasks.append(generic.FileTask(f.path + '.tmp', flags=generic.TaskFlag.DELETE_FILE))
+ else:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.DELETE_FILE | generic.TaskFlag.RENAME_FILE, old_file=f.path+'.tmp'))
+
+ if 'executable' in f.flags:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE))
+ downloaded_linux[f.hash] = f
+
+ elif isinstance(f, v2.DepotFile):
+ support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE
+ if not len(f.chunks):
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE | support_flag))
+ continue
+ if f.path.lower() in completed_files:
+ continue
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE | support_flag))
+ for i, chunk in enumerate(f.chunks):
+ new_task = generic.ChunkTask(f.product_id, i, chunk["compressedMd5"], chunk["md5"], chunk["size"], chunk["compressedSize"])
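+                    # Chunks shared by several files are downloaded once, offloaded to the on-disk cache, and copied from there afterwards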
+ is_cached = chunk["md5"] in cached
+ if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached:
+ self.v2_chunks_to_download.append((f.product_id, chunk["compressedMd5"]))
+ self.download_size += chunk['compressedSize']
+ new_task.offload_to_cache = True
+ new_task.cleanup = True
+ cached.add(chunk["md5"])
+ current_tmp_size += chunk['size']
+ elif is_cached:
+ new_task.old_offset = 0
+ # This can safely be absolute path, due to
+ # how os.path.join works in Writer
+ new_task.old_file = os.path.join(self.cache, chunk["md5"])
+ else:
+ self.v2_chunks_to_download.append((f.product_id, chunk["compressedMd5"]))
+ self.download_size += chunk['compressedSize']
+ self.disk_size += chunk['size']
+ current_tmp_size += chunk['size']
+ shared_chunks_counter[chunk["compressedMd5"]] -= 1
+ new_task.cleanup = True
+ self.tasks.append(new_task)
+ if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0:
+ cached.remove(chunk["md5"])
+ self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE))
+ current_tmp_size -= chunk['size']
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag))
+ if 'executable' in f.flags:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag))
+
+ elif isinstance(f, v2.FileDiff):
+ chunk_tasks = []
+ reused = 0
+ file_size = 0
+ support_flag = generic.TaskFlag.SUPPORT if 'support' in f.file.flags else generic.TaskFlag.NONE
+ old_support_flag = generic.TaskFlag.SUPPORT if 'support' in f.old_file_flags else generic.TaskFlag.NONE
+ if f.file.path.lower() in completed_files:
+ continue
+ for i, chunk in enumerate(f.file.chunks):
+ chunk_task = generic.ChunkTask(f.file.product_id, i, chunk["compressedMd5"], chunk["md5"], chunk["size"], chunk["compressedSize"])
+ file_size += chunk['size']
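+                    # Reuse data already on disk when the diff gives an old_offset and the file passed the resume checks; otherwise download the chunk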
+ if chunk.get("old_offset") is not None and f.file.path.lower() not in mismatched_files and f.file.path.lower() not in missing_files:
+ chunk_task.old_offset = chunk["old_offset"]
+ chunk_task.old_flags = old_support_flag
+ chunk_task.old_file = f.file.path
+ reused += 1
+
+ chunk_tasks.append(chunk_task)
+ else:
+ is_cached = chunk["md5"] in cached
+ if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached:
+ self.v2_chunks_to_download.append((f.file.product_id, chunk["compressedMd5"]))
+ self.download_size += chunk['compressedSize']
+ chunk_task.offload_to_cache = True
+ cached.add(chunk["md5"])
+ current_tmp_size += chunk['size']
+ elif is_cached:
+ chunk_task.old_offset = 0
+ chunk_task.old_file = os.path.join(self.cache, chunk["md5"])
+ else:
+ self.v2_chunks_to_download.append((f.file.product_id, chunk["compressedMd5"]))
+ self.download_size += chunk['compressedSize']
+
+ shared_chunks_counter[chunk["compressedMd5"]] -= 1
+ chunk_task.cleanup = True
+ chunk_tasks.append(chunk_task)
+ if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0:
+ cached.remove(chunk["md5"])
+ self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE))
+ current_tmp_size -= chunk['size']
+ current_tmp_size += file_size
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+ if reused:
+ self.tasks.append(generic.FileTask(f.file.path + ".tmp", flags=generic.TaskFlag.OPEN_FILE | support_flag))
+ self.tasks.extend(chunk_tasks)
+ self.tasks.append(generic.FileTask(f.file.path + ".tmp", flags=generic.TaskFlag.CLOSE_FILE | support_flag))
+ self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.RENAME_FILE | generic.TaskFlag.DELETE_FILE | support_flag, old_file=f.file.path + ".tmp"))
+ current_tmp_size -= file_size
+ else:
+ self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.OPEN_FILE | support_flag))
+ self.tasks.extend(chunk_tasks)
+ self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag))
+ if 'executable' in f.file.flags:
+ self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.MAKE_EXE | support_flag))
+ self.disk_size += file_size
+
+ elif isinstance(f, v2.FilePatchDiff):
+ chunk_tasks = []
+ patch_size = 0
+ old_file_size = 0
+ out_file_size = 0
+ if f.target.lower() in completed_files:
+ continue
+
+ # Calculate output size
+ for chunk in f.new_file.chunks:
+ out_file_size += chunk['size']
+
+ # Calculate old size
+ for chunk in f.old_file.chunks:
+ old_file_size += chunk['size']
+
+ # Make chunk tasks
+ for i, chunk in enumerate(f.chunks):
+ chunk_task = generic.ChunkTask(f'{f.new_file.product_id}_patch', i, chunk['compressedMd5'], chunk['md5'], chunk['size'], chunk['compressedSize'])
+ chunk_task.cleanup = True
+ patch_size += chunk['size']
+ is_cached = chunk["md5"] in cached
+ if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached:
+ self.v2_chunks_to_download.append((f'{f.new_file.product_id}_patch', chunk["compressedMd5"]))
+ chunk_task.offload_to_cache = True
+ cached.add(chunk["md5"])
+ self.download_size += chunk['compressedSize']
+ current_tmp_size += chunk['size']
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+ elif is_cached:
+ chunk_task.old_offset = 0
+ chunk_task.old_file = os.path.join(self.cache, chunk["md5"])
+ else:
+ self.v2_chunks_to_download.append((f'{f.new_file.product_id}_patch', chunk["compressedMd5"]))
+ self.download_size += chunk['compressedSize']
+ shared_chunks_counter[chunk['compressedMd5']] -= 1
+ chunk_tasks.append(chunk_task)
+ if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0:
+ cached.remove(chunk["md5"])
+ self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE))
+ current_tmp_size -= chunk['size']
+
+ self.disk_size += patch_size
+ current_tmp_size += patch_size
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+
+ # Download patch
+ self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.OPEN_FILE))
+ self.tasks.extend(chunk_tasks)
+ self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.CLOSE_FILE))
+
+ current_tmp_size += out_file_size
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+
+ # Apply patch to .tmp file
+ self.tasks.append(generic.FileTask(f.target + ".tmp", flags=generic.TaskFlag.PATCH, patch_file=(f.target + '.delta'), old_file=f.source))
+ current_tmp_size -= patch_size
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+ # Remove patch file
+ self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.DELETE_FILE))
+ current_tmp_size -= old_file_size
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+ # Move new file to old one's location
+ self.tasks.append(generic.FileTask(f.target, flags=generic.TaskFlag.RENAME_FILE | generic.TaskFlag.DELETE_FILE, old_file=f.target + ".tmp"))
+ self.disk_size += out_file_size
+
+ required_disk_size_delta = max(current_tmp_size, required_disk_size_delta)
+
+
+ for f in self.diff.links:
+ self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_SYMLINK, old_file=f.target))
+
+ self.items_to_complete = len(self.tasks)
+
+ print(get_readable_size(self.download_size), self.download_size)
+ print(get_readable_size(required_disk_size_delta), required_disk_size_delta)
+
+ return dl_utils.check_free_space(required_disk_size_delta, self.path)
+
+
+ def run(self):
+ self.logger.debug(f"Using temp directory: {self.temp_dir}")
+ interrupted = False
+ self.fatal_error = False
+
+ def handle_sig(num, frame):
+ nonlocal interrupted
+ self.interrupt_shutdown()
+ interrupted = True
+ exit(-num)
+
+ try:
+ self.threads.append(Thread(target=self.download_manager, args=(self.task_cond, self.temp_cond)))
+ self.threads.append(Thread(target=self.process_task_results, args=(self.task_cond,)))
+ self.threads.append(Thread(target=self.process_writer_task_results, args=(self.temp_cond,)))
+ self.progress = ProgressBar(self.disk_size, self.download_speed_updates, self.writer_speed_updates, self.game_id)
+
+ # Spawn workers using threads instead of processes
+ self.logger.info(f"Starting {self.allowed_threads} download workers for game {self.game_id}")
+ for i in range(self.allowed_threads):
+ worker = Thread(target=task_executor.download_worker, args=(
+ self.download_queue, self.download_res_queue,
+ self.download_speed_updates, self.secure_links, self.temp_dir, self.game_id
+ ))
+ worker.start()
+ self.download_workers.append(worker)
+
+ self.writer_worker = Thread(target=task_executor.writer_worker, args=(
+ self.writer_queue, self.writer_res_queue,
+ self.writer_speed_updates, self.cache, self.temp_dir
+ ))
+ self.writer_worker.start()
+
+ [th.start() for th in self.threads]
+
+ # Signal handling - Android compatibility
+ try:
+ signal.signal(signal.SIGTERM, handle_sig)
+ signal.signal(signal.SIGINT, handle_sig)
+ except ValueError as e:
+ # Android: signal only works in main thread
+ self.logger.debug(f"Signal handling not available: {e}")
+
+ if self.disk_size:
+ self.progress.start()
+
+ while self.processed_items < self.items_to_complete and not interrupted and not self.fatal_error:
+ # Check for Android cancellation signal
+ try:
+ import builtins
+ flag_name = f'GOGDL_CANCEL_{self.game_id}'
+ if hasattr(builtins, flag_name):
+ flag_value = getattr(builtins, flag_name, False)
+ if flag_value:
+ self.logger.info(f"Download cancelled by user for game {self.game_id}")
+ self.fatal_error = True # Mark as error to prevent completion
+ interrupted = True
+ break
+ except Exception as e:
+ self.logger.debug(f"Error checking cancellation flag: {e}")
+
+ time.sleep(1)
+ if interrupted:
+ return True
+ except KeyboardInterrupt:
+ return True
+
+ self.shutdown()
+ return self.fatal_error
+
+ def interrupt_shutdown(self):
+ self.progress.completed = True
+ self.running = False
+
+ with self.task_cond:
+ self.task_cond.notify()
+
+ with self.temp_cond:
+ self.temp_cond.notify()
+
+ for t in self.threads:
+ t.join(timeout=5.0)
+ if t.is_alive():
+ self.logger.warning(f'Thread did not terminate! {repr(t)}')
+
+ for worker in self.download_workers:
+ worker.join(timeout=5.0)
+
+ def shutdown(self):
+ self.logger.debug("Stopping progressbar")
+ self.progress.completed = True
+
+ self.logger.debug("Sending terminate instruction to workers")
+ for _ in range(self.allowed_threads):
+ self.download_queue.put(generic.TerminateWorker())
+
+ self.writer_queue.put(generic.TerminateWorker())
+
+ for worker in self.download_workers:
+ worker.join(timeout=2)
+
+ if self.writer_worker:
+ self.writer_worker.join(timeout=10)
+
+ self.running = False
+ with self.task_cond:
+ self.task_cond.notify()
+
+ with self.temp_cond:
+ self.temp_cond.notify()
+
+ # Clean up temp directory
+ import shutil
+ try:
+ shutil.rmtree(self.temp_dir)
+        except OSError as e:
+            self.logger.warning(f"Failed to clean up temp directory: {e}")
+
+ try:
+ if os.path.exists(self.resume_file):
+ os.remove(self.resume_file)
+        except OSError as e:
+            self.logger.error(f"Failed to remove resume file: {e}")
+
+ def download_manager(self, task_cond: Condition, temp_cond: Condition):
+ self.logger.debug("Starting download scheduler")
+ no_temp = False
+ while self.running:
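+            # Keep at most two in-flight download tasks per allowed thread and stop scheduling when temp files run out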
+ while self.active_tasks <= self.allowed_threads * 2 and (self.v2_chunks_to_download or self.v1_chunks_to_download):
+
+ try:
+ temp_file = self.temp_files.popleft()
+ no_temp = False
+ except IndexError:
+ no_temp = True
+ break
+
+ if self.v1_chunks_to_download:
+ product_id, chunk_id, offset, chunk_size = self.v1_chunks_to_download.popleft()
+
+ try:
+ self.download_queue.put(task_executor.DownloadTask1(product_id, offset, chunk_size, chunk_id, temp_file))
+ self.logger.debug(f"Pushed v1 download to queue {chunk_id} {product_id} {offset} {chunk_size}")
+ self.active_tasks += 1
+ continue
+ except Exception as e:
+ self.logger.warning(f"Failed to push v1 task to download {e}")
+ self.v1_chunks_to_download.appendleft((product_id, chunk_id, offset, chunk_size))
+ self.temp_files.appendleft(temp_file)
+ break
+
+ elif self.v2_chunks_to_download:
+ product_id, chunk_hash = self.v2_chunks_to_download.popleft()
+ try:
+ self.download_queue.put(task_executor.DownloadTask2(product_id, chunk_hash, temp_file))
+ self.logger.debug(f"Pushed DownloadTask2 for {chunk_hash}")
+ self.active_tasks += 1
+ except Exception as e:
+ self.logger.warning(f"Failed to push task to download {e}")
+ self.v2_chunks_to_download.appendleft((product_id, chunk_hash))
+ self.temp_files.appendleft(temp_file)
+ break
+
+ else:
+ with task_cond:
+ self.logger.debug("Waiting for more tasks")
+ task_cond.wait(timeout=1.0)
+ continue
+
+ if no_temp:
+ with temp_cond:
+ self.logger.debug(f"Waiting for more temp files")
+ temp_cond.wait(timeout=1.0)
+
+ self.logger.debug("Download scheduler out..")
+
+ def process_task_results(self, task_cond: Condition):
+ self.logger.debug("Download results collector starting")
+ ready_chunks = dict()
+
+ try:
+ task = self.tasks.popleft()
+ except IndexError:
+ task = None
+
+ current_dest = self.path
+ current_file = ''
+
+ while task and self.running:
+ if isinstance(task, generic.FileTask):
+ try:
+ task_dest = self.path
+ old_destination = self.path
+ if task.flags & generic.TaskFlag.SUPPORT:
+ task_dest = self.support
+ if task.old_flags & generic.TaskFlag.SUPPORT:
+ old_destination = self.support
+
+ writer_task = task_executor.WriterTask(task_dest, task.path, task.flags, old_destination=old_destination, old_file=task.old_file, patch_file=task.patch_file)
+ self.writer_queue.put(writer_task)
+ if task.flags & generic.TaskFlag.OPEN_FILE:
+ current_file = task.path
+ current_dest = task_dest
+ except Exception as e:
+ self.tasks.appendleft(task)
+ self.logger.warning(f"Failed to add queue element {e}")
+ continue
+
+ try:
+ task: Union[generic.ChunkTask, generic.V1Task] = self.tasks.popleft()
+ except IndexError:
+ break
+ continue
+
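+            # Push chunk tasks whose data is already downloaded (or reusable from an old file or cache) to the writer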
+ while ((task.compressed_md5 in ready_chunks) or task.old_file):
+ temp_file = None
+ if not task.old_file:
+ temp_file = ready_chunks[task.compressed_md5].temp_file
+
+ try:
+ self.logger.debug(f"Adding {task.compressed_md5} to writer")
+ flags = generic.TaskFlag.NONE
+ old_destination = None
+ if task.cleanup:
+ flags |= generic.TaskFlag.RELEASE_TEMP
+ if task.offload_to_cache:
+ flags |= generic.TaskFlag.OFFLOAD_TO_CACHE
+ if task.old_flags & generic.TaskFlag.SUPPORT:
+ old_destination = self.support
+ self.writer_queue.put(task_executor.WriterTask(current_dest, current_file, flags=flags, temp_file=temp_file, old_destination=old_destination, old_file=task.old_file, old_offset=task.old_offset, size=task.size, hash=task.md5))
+ except Exception as e:
+ self.logger.error(f"Adding to writer queue failed {e}")
+ break
+
+ if task.cleanup and not task.old_file:
+ del ready_chunks[task.compressed_md5]
+
+ try:
+ task = self.tasks.popleft()
+ if isinstance(task, generic.FileTask):
+ break
+ except IndexError:
+ task = None
+ break
+
+ else:
+ try:
+ res: task_executor.DownloadTaskResult = self.download_res_queue.get(timeout=1)
+ if res.success:
+ self.logger.debug(f"Chunk {res.task.compressed_sum} ready")
+ ready_chunks[res.task.compressed_sum] = res
+ self.progress.update_downloaded_size(res.download_size)
+ self.progress.update_decompressed_size(res.decompressed_size)
+ self.active_tasks -= 1
+ else:
+ self.logger.warning(f"Chunk download failed, reason {res.fail_reason}")
+ try:
+ self.download_queue.put(res.task)
+ except Exception as e:
+ self.logger.warning("Failed to resubmit download task")
+
+ with task_cond:
+ task_cond.notify()
+                except Exception:
+                    # Most commonly a queue.get timeout; keep polling
+                    pass
+
+ self.logger.debug("Download results collector exiting...")
+
+ def process_writer_task_results(self, temp_cond: Condition):
+ self.logger.debug("Starting writer results collector")
+ while self.running:
+ try:
+ res: task_executor.WriterTaskResult = self.writer_res_queue.get(timeout=1)
+
+ if isinstance(res.task, generic.TerminateWorker):
+ break
+
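+                # Record every successfully closed file in the resume file so an interrupted download can be continued later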
+ if res.success and res.task.flags & generic.TaskFlag.CLOSE_FILE and not res.task.file_path.endswith('.delta'):
+ if res.task.file_path.endswith('.tmp'):
+ res.task.file_path = res.task.file_path[:-4]
+
+ checksum = self.hash_map.get(res.task.file_path.lower())
+ if not checksum:
+ self.logger.warning(f"No checksum for closed file, unable to push to resume file {res.task.file_path}")
+ else:
+ if res.task.flags & generic.TaskFlag.SUPPORT:
+ support = "support"
+ else:
+ support = ""
+
+ with open(self.resume_file, 'a') as f:
+ f.write(f"{checksum}:{support}:{res.task.file_path}\n")
+
+ if not res.success:
+ self.logger.fatal("Task writer failed")
+ self.fatal_error = True
+ return
+
+ self.progress.update_bytes_written(res.written)
+ if res.task.flags & generic.TaskFlag.RELEASE_TEMP and res.task.temp_file:
+ self.logger.debug(f"Releasing temp file {res.task.temp_file}")
+ self.temp_files.appendleft(res.task.temp_file)
+ with temp_cond:
+ temp_cond.notify()
+ self.processed_items += 1
+
+            except Exception:
+                continue
+
+ self.logger.debug("Writer results collector exiting...")
diff --git a/app/src/main/python/gogdl/dl/managers/v1.py b/app/src/main/python/gogdl/dl/managers/v1.py
new file mode 100644
index 000000000..eef5f902e
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/v1.py
@@ -0,0 +1,313 @@
+"""
+Android-compatible V1 manager for generation 1 games
+Based on heroic-gogdl v1.py but with Android compatibility
+"""
+
+# Handle old games downloading via V1 depot system
+# V1 is there since GOG 1.0 days, it has no compression and relies on downloading chunks from big main.bin file
+import hashlib
+from sys import exit
+import os
+import logging
+import json
+from typing import Union
+from gogdl import constants
+from gogdl.dl import dl_utils
+from gogdl.dl.managers.dependencies import DependenciesManager
+from gogdl.dl.managers.task_executor import ExecutingManager
+from gogdl.dl.workers.task_executor import DownloadTask1, DownloadTask2, WriterTask
+from gogdl.dl.objects import v1
+from gogdl.languages import Language
+
+
+class Manager:
+ def __init__(self, generic_manager):
+ self.game_id = generic_manager.game_id
+ self.arguments = generic_manager.arguments
+ self.unknown_arguments = generic_manager.unknown_arguments
+ if "path" in self.arguments:
+ self.path = self.arguments.path
+ else:
+ self.path = ""
+
+ if "support_path" in self.arguments:
+ self.support = self.arguments.support_path
+ else:
+ self.support = ""
+
+ self.api_handler = generic_manager.api_handler
+ self.should_append_folder_name = generic_manager.should_append_folder_name
+ self.is_verifying = generic_manager.is_verifying
+ self.allowed_threads = generic_manager.allowed_threads
+
+ self.platform = generic_manager.platform
+
+ self.builds = generic_manager.builds
+ self.build = generic_manager.target_build
+ self.version_name = self.build["version_name"]
+
+ self.lang = Language.parse(self.arguments.lang or "English")
+ self.dlcs_should_be_downloaded = self.arguments.dlcs
+ if self.arguments.dlcs_list:
+ self.dlcs_list = self.arguments.dlcs_list.split(",")
+
+ else:
+ self.dlcs_list = list()
+
+ self.dlc_only = self.arguments.dlc_only
+
+        self.manifest = None
+        self.meta = None
+        self.version_etag = None
+
+ self.logger = logging.getLogger("V1")
+ self.logger.info("Initialized V1 Download Manager")
+
+ # Get manifest of selected build
+ def get_meta(self):
+ meta_url = self.build["link"]
+ self.meta, headers = dl_utils.get_zlib_encoded(self.api_handler, meta_url)
+ if not self.meta:
+ raise Exception("There was an error obtaining meta")
+ if headers:
+ self.version_etag = headers.get("Etag")
+
+ # Append folder name when downloading
+ if self.should_append_folder_name:
+ self.path = os.path.join(self.path, self.meta["product"]["installDirectory"])
+
+ def get_download_size(self):
+ self.get_meta()
+ dlcs = self.get_dlcs_user_owns(True)
+ self.manifest = v1.Manifest(self.platform, self.meta, self.lang, dlcs, self.api_handler, False)
+
+ build = self.api_handler.get_dependencies_repo()
+ repository = dl_utils.get_zlib_encoded(self.api_handler, build['repository_manifest'])[0] or {}
+
+ size_data = self.manifest.calculate_download_size()
+
+ for depot in repository["depots"]:
+ if depot["dependencyId"] in self.manifest.dependencies_ids:
+ if not depot["executable"]["path"].startswith("__redist"):
+ size_data[self.game_id]['*']["download_size"] += depot["compressedSize"]
+ size_data[self.game_id]['*']["disk_size"] += depot["size"]
+
+ available_branches = set([build["branch"] for build in self.builds["items"] if build["branch"]])
+ available_branches_list = [None] + list(available_branches)
+
+ for dlc in dlcs:
+ dlc.update({"size": size_data[dlc["id"]]})
+
+ response = {
+ "size": size_data[self.game_id],
+ "dlcs": dlcs,
+ "buildId": self.build["legacy_build_id"],
+ "languages": self.manifest.list_languages(),
+ "folder_name": self.meta["product"]["installDirectory"],
+ "dependencies": [dep.id for dep in self.manifest.dependencies],
+ "versionEtag": self.version_etag,
+ "versionName": self.version_name,
+ "available_branches": available_branches_list
+ }
+ return response
+
+
+ def get_dlcs_user_owns(self, info_command=False, requested_dlcs=None):
+ if requested_dlcs is None:
+ requested_dlcs = list()
+ if not self.dlcs_should_be_downloaded and not info_command:
+ return []
+ self.logger.debug("Getting dlcs user owns")
+ dlcs = []
+ if len(requested_dlcs) > 0:
+ for product in self.meta["product"]["gameIDs"]:
+ if (
+ product["gameID"] != self.game_id # Check if not base game
+ and product["gameID"] in requested_dlcs # Check if requested by user
+ and self.api_handler.does_user_own(product["gameID"]) # Check if owned
+ ):
+ dlcs.append({"title": product["name"]["en"], "id": product["gameID"]})
+ return dlcs
+ for product in self.meta["product"]["gameIDs"]:
+ # Check if not base game and if owned
+ if product["gameID"] != self.game_id and self.api_handler.does_user_own(
+ product["gameID"]
+ ):
+ dlcs.append({"title": product["name"]["en"], "id": product["gameID"]})
+ return dlcs
+
+
+ def download(self):
+ manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id)
+ old_manifest = None
+
+ # Load old manifest
+ if os.path.exists(manifest_path):
+ with open(manifest_path, "r") as f_handle:
+ try:
+ json_data = json.load(f_handle)
+ old_manifest = dl_utils.create_manifest_class(json_data, self.api_handler)
+ except json.JSONDecodeError:
+ old_manifest = None
+
+ if self.is_verifying:
+ if old_manifest:
+ self.manifest = old_manifest
+ old_manifest = None
+ dlcs_user_owns = self.manifest.dlcs or []
+ else:
+ raise Exception("No manifest stored locally, unable to verify")
+ else:
+ self.get_meta()
+ dlcs_user_owns = self.get_dlcs_user_owns(requested_dlcs=self.dlcs_list)
+
+ if self.arguments.dlcs_list:
+ self.logger.info(f"Requested dlcs {self.arguments.dlcs_list}")
+ self.logger.info(f"Owned dlcs {dlcs_user_owns}")
+ self.logger.debug("Parsing manifest")
+ self.manifest = v1.Manifest(self.platform, self.meta, self.lang, dlcs_user_owns, self.api_handler, self.dlc_only)
+
+ if self.manifest:
+ self.manifest.get_files()
+
+ if old_manifest:
+ old_manifest.get_files()
+
+ diff = v1.ManifestDiff.compare(self.manifest, old_manifest)
+
+ self.logger.info(f"{diff}")
+ self.logger.info(f"Old manifest files count: {len(old_manifest.files) if old_manifest else 0}")
+ self.logger.info(f"New manifest files count: {len(self.manifest.files)}")
+
+ # Calculate total expected size
+ total_size = sum(file.size for file in self.manifest.files)
+ self.logger.info(f"Total expected game size: {total_size} bytes ({total_size / (1024*1024):.2f} MB)")
+
+ # Show some example files
+ if self.manifest.files:
+ self.logger.info(f"Example files in manifest:")
+ for i, file in enumerate(self.manifest.files[:5]): # Show first 5 files
+ self.logger.info(f" {file.path}: {file.size} bytes")
+ if len(self.manifest.files) > 5:
+ self.logger.info(f" ... and {len(self.manifest.files) - 5} more files")
+
+
+ has_dependencies = len(self.manifest.dependencies) > 0
+
+ secure_link_endpoints_ids = [product["id"] for product in dlcs_user_owns]
+ if not self.dlc_only:
+ secure_link_endpoints_ids.append(self.game_id)
+ secure_links = dict()
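+        # Request a secure CDN link for the base game and each owned DLC (generation 1 endpoints)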
+ for product_id in secure_link_endpoints_ids:
+ secure_links.update(
+ {
+ product_id: dl_utils.get_secure_link(
+ self.api_handler, f"/{self.platform}/{self.manifest.data['product']['timestamp']}/", product_id, generation=1
+ )
+ }
+ )
+
+ dependency_manager = DependenciesManager([dep.id for dep in self.manifest.dependencies], self.path, self.allowed_threads, self.api_handler, download_game_deps_only=True)
+
+ # Find dependencies that are no longer used
+ if old_manifest:
+ removed_dependencies = [id for id in old_manifest.dependencies_ids if id not in self.manifest.dependencies_ids]
+
+ for depot in dependency_manager.repository["depots"]:
+ if depot["dependencyId"] in removed_dependencies and not depot["executable"]["path"].startswith("__redist"):
+ diff.removed_redist += dependency_manager.get_files_for_depot_manifest(depot['manifest'])
+
+ if has_dependencies:
+ secure_links.update({'redist': dl_utils.get_dependency_link(self.api_handler)})
+
+ diff.redist = dependency_manager.get(return_files=True) or []
+
+
+ if not len(diff.changed) and not len(diff.deleted) and not len(diff.new) and not len(diff.redist) and not len(diff.removed_redist):
+ self.logger.info("Nothing to do")
+ return
+
+ if self.is_verifying:
+ new_diff = v1.ManifestDiff()
+ invalid = 0
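+            # V1 files carry one MD5 for the whole file, so verification hashes each file on disk in 8 MiB blocks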
+ for file in diff.new:
+ # V1 only files
+ if not file.size:
+ continue
+
+ if 'support' in file.flags:
+ file_path = os.path.join(self.support, file.path)
+ else:
+ file_path = os.path.join(self.path, file.path)
+ file_path = dl_utils.get_case_insensitive_name(file_path)
+
+ if not os.path.exists(file_path):
+ invalid += 1
+ new_diff.new.append(file)
+ continue
+
+ with open(file_path, 'rb') as fh:
+ file_sum = hashlib.md5()
+
+ while chunk := fh.read(8 * 1024 * 1024):
+ file_sum.update(chunk)
+
+ if file_sum.hexdigest() != file.hash:
+ invalid += 1
+ new_diff.new.append(file)
+ continue
+
+ for file in diff.redist:
+ if len(file.chunks) == 0:
+ continue
+ file_path = dl_utils.get_case_insensitive_name(os.path.join(self.path, file.path))
+ if not os.path.exists(file_path):
+ invalid += 1
+ new_diff.redist.append(file)
+ continue
+ valid = True
+ with open(file_path, 'rb') as fh:
+ for chunk in file.chunks:
+ chunk_sum = hashlib.md5()
+ chunk_data = fh.read(chunk['size'])
+ chunk_sum.update(chunk_data)
+
+ if chunk_sum.hexdigest() != chunk['md5']:
+ valid = False
+ break
+ if not valid:
+ invalid += 1
+ new_diff.redist.append(file)
+ continue
+ if not invalid:
+ self.logger.info("All files look good")
+ return
+
+ self.logger.info(f"Found {invalid} broken files, repairing...")
+ diff = new_diff
+
+ executor = ExecutingManager(self.api_handler, self.allowed_threads, self.path, self.support, diff, secure_links, self.game_id)
+ success = executor.setup()
+ if not success:
+            print('Unable to proceed, not enough disk space')
+ exit(2)
+ dl_utils.prepare_location(self.path)
+
+ for dir in self.manifest.dirs:
+ manifest_dir_path = os.path.join(self.path, dir.path)
+ dl_utils.prepare_location(dl_utils.get_case_insensitive_name(manifest_dir_path))
+
+ cancelled = executor.run()
+
+ if cancelled:
+ return
+
+ dl_utils.prepare_location(constants.MANIFESTS_DIR)
+ if self.manifest:
+ with open(manifest_path, 'w') as f_handle:
+ data = self.manifest.serialize_to_json()
+ f_handle.write(data)
+
+ self.logger.info(f"Old manifest files count: {len(old_manifest.files) if old_manifest else 0}")
+ self.logger.info(f"New manifest files count: {len(self.manifest.files)}")
+ self.logger.info(f"Target directory: {self.path}")
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/dl/managers/v2.py b/app/src/main/python/gogdl/dl/managers/v2.py
new file mode 100644
index 000000000..9b51033bd
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/managers/v2.py
@@ -0,0 +1,310 @@
+"""
+Android-compatible V2 manager for Windows game downloads
+"""
+
+# Handle newer depots download
+# This was introduced in GOG Galaxy 2.0, it features compression and files split by chunks
+import json
+from sys import exit
+from gogdl.dl import dl_utils
+import gogdl.dl.objects.v2 as v2
+import hashlib
+from gogdl.dl.managers import dependencies
+from gogdl.dl.managers.task_executor import ExecutingManager
+from gogdl.dl.workers import task_executor
+from gogdl.languages import Language
+from gogdl import constants
+import os
+import logging
+
+
+class Manager:
+ def __init__(self, generic_manager):
+ self.game_id = generic_manager.game_id
+ self.arguments = generic_manager.arguments
+ self.unknown_arguments = generic_manager.unknown_arguments
+ if "path" in self.arguments:
+ self.path = self.arguments.path
+ else:
+ self.path = ""
+ if "support_path" in self.arguments:
+ self.support = self.arguments.support_path
+ else:
+ self.support = ""
+
+ self.allowed_threads = generic_manager.allowed_threads
+
+ self.api_handler = generic_manager.api_handler
+ self.should_append_folder_name = generic_manager.should_append_folder_name
+ self.is_verifying = generic_manager.is_verifying
+
+ self.builds = generic_manager.builds
+ self.build = generic_manager.target_build
+ self.version_name = self.build["version_name"]
+
+ self.lang = Language.parse(self.arguments.lang or "en-US")
+ self.dlcs_should_be_downloaded = self.arguments.dlcs
+ if self.arguments.dlcs_list:
+ self.dlcs_list = self.arguments.dlcs_list.split(",")
+ else:
+ self.dlcs_list = list()
+ self.dlc_only = self.arguments.dlc_only
+
+ self.manifest = None
+ self.stop_all_threads = False
+
+ self.logger = logging.getLogger("V2")
+ self.logger.info("Initialized V2 Download Manager")
+
+ def get_download_size(self):
+ self.get_meta()
+ dlcs = self.get_dlcs_user_owns(info_command=True)
+ self.manifest = v2.Manifest(self.meta, self.lang, dlcs, self.api_handler, False)
+
+ build = self.api_handler.get_dependencies_repo()
+ repository = dl_utils.get_zlib_encoded(self.api_handler, build['repository_manifest'])[0] or {}
+
+ size_data = self.manifest.calculate_download_size()
+
+ for depot in repository["depots"]:
+ if depot["dependencyId"] in self.manifest.dependencies_ids:
+ if not depot["executable"]["path"].startswith("__redist"):
+ size_data[self.game_id]['*']["download_size"] += depot.get("compressedSize") or 0
+ size_data[self.game_id]['*']["disk_size"] += depot.get("size") or 0
+
+ available_branches = set([build["branch"] for build in self.builds["items"] if build["branch"]])
+ available_branches_list = [None] + list(available_branches)
+
+
+ for dlc in dlcs:
+ dlc.update({"size": size_data[dlc["id"]]})
+
+ response = {
+ "size": size_data[self.game_id],
+ "dlcs": dlcs,
+ "buildId": self.build["build_id"],
+ "languages": self.manifest.list_languages(),
+ "folder_name": self.meta["installDirectory"],
+ "dependencies": self.manifest.dependencies_ids,
+ "versionEtag": self.version_etag,
+ "versionName": self.version_name,
+ "available_branches": available_branches_list
+ }
+ return response
+
+ def download(self):
+ manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id)
+ old_manifest = None
+
+ # Load old manifest
+ if os.path.exists(manifest_path):
+ self.logger.debug(f"Loading existing manifest for game {self.game_id}")
+ with open(manifest_path, 'r') as f_handle:
+ try:
+ json_data = json.load(f_handle)
+ self.logger.info("Creating Manifest instance from existing manifest")
+ old_manifest = dl_utils.create_manifest_class(json_data, self.api_handler)
+ except json.JSONDecodeError:
+ old_manifest = None
+
+ if self.is_verifying:
+ if old_manifest:
+ self.logger.warning("Verifying - ignoring obtained manifest in favor of existing one")
+ self.manifest = old_manifest
+ dlcs_user_owns = self.manifest.dlcs or []
+ old_manifest = None
+ else:
+ raise Exception("No manifest stored locally, unable to verify")
+ else:
+ self.get_meta()
+ dlcs_user_owns = self.get_dlcs_user_owns(
+ requested_dlcs=self.dlcs_list
+ )
+
+ if self.arguments.dlcs_list:
+ self.logger.info(f"Requested dlcs {self.arguments.dlcs_list}")
+ self.logger.info(f"Owned dlcs {dlcs_user_owns}")
+
+ self.logger.debug("Parsing manifest")
+ self.manifest = v2.Manifest(
+ self.meta, self.lang, dlcs_user_owns, self.api_handler, self.dlc_only
+ )
+ patch = None
+ if self.manifest:
+ self.logger.debug("Requesting files of primary manifest")
+ self.manifest.get_files()
+ if old_manifest:
+ self.logger.debug("Requesting files of previous manifest")
+ old_manifest.get_files()
+ patch = v2.Patch.get(self.manifest, old_manifest, self.lang, dlcs_user_owns, self.api_handler)
+ if not patch:
+ self.logger.info("No patch found, falling back to chunk based updates")
+
+ diff = v2.ManifestDiff.compare(self.manifest, old_manifest, patch)
+ self.logger.info(diff)
+
+
+ dependencies_manager = dependencies.DependenciesManager(self.manifest.dependencies_ids, self.path,
+ self.arguments.workers_count, self.api_handler, download_game_deps_only=True)
+
+ # Find dependencies that are no longer used
+ if old_manifest:
+ removed_dependencies = [id for id in old_manifest.dependencies_ids if id not in self.manifest.dependencies_ids]
+
+ for depot in dependencies_manager.repository["depots"]:
+ if depot["dependencyId"] in removed_dependencies and not depot["executable"]["path"].startswith("__redist"):
+ diff.removed_redist += dependencies_manager.get_files_for_depot_manifest(depot['manifest'])
+
+
+ diff.redist = dependencies_manager.get(True) or []
+
+ if not len(diff.changed) and not len(diff.deleted) and not len(diff.new) and not len(diff.redist) and not len(diff.removed_redist):
+ self.logger.info("Nothing to do")
+ return
+ secure_link_endpoints_ids = [product["id"] for product in dlcs_user_owns]
+ if not self.dlc_only:
+ secure_link_endpoints_ids.append(self.game_id)
+ secure_links = dict()
+ for product_id in secure_link_endpoints_ids:
+ secure_links.update(
+ {
+ product_id: dl_utils.get_secure_link(
+ self.api_handler, "/", product_id
+ )
+ }
+ )
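+            # Patch chunks are served from a separate store root, so request a dedicated secure link per product when a patch is used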
+ if patch:
+ secure_links.update(
+ {
+ f"{product_id}_patch": dl_utils.get_secure_link(
+ self.api_handler, "/", product_id, root="/patches/store"
+ )
+ }
+ )
+
+ if len(diff.redist) > 0:
+ secure_links.update(
+ {
+ 'redist': dl_utils.get_dependency_link(self.api_handler)
+ }
+ )
+
+ if self.is_verifying:
+ new_diff = v2.ManifestDiff()
+ invalid = 0
+
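+            # Re-hash every chunk of every on-disk file and queue anything missing or mismatched for re-download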
+ for file in diff.new:
+ if len(file.chunks) == 0:
+ continue
+ if 'support' in file.flags:
+ file_path = os.path.join(self.support, file.path)
+ else:
+ file_path = os.path.join(self.path, file.path)
+ file_path = dl_utils.get_case_insensitive_name(file_path)
+ if not os.path.exists(file_path):
+ invalid += 1
+ new_diff.new.append(file)
+ continue
+ valid = True
+ with open(file_path, 'rb') as fh:
+ for chunk in file.chunks:
+ chunk_sum = hashlib.md5()
+ chunk_data = fh.read(chunk['size'])
+ chunk_sum.update(chunk_data)
+
+ if chunk_sum.hexdigest() != chunk['md5']:
+ valid = False
+ break
+ if not valid:
+ invalid += 1
+ new_diff.new.append(file)
+ continue
+
+ for file in diff.redist:
+ if len(file.chunks) == 0:
+ continue
+ file_path = dl_utils.get_case_insensitive_name(os.path.join(self.path, file.path))
+ if not os.path.exists(file_path):
+ invalid += 1
+ new_diff.redist.append(file)
+ continue
+ valid = True
+ with open(file_path, 'rb') as fh:
+ for chunk in file.chunks:
+ chunk_sum = hashlib.md5()
+ chunk_data = fh.read(chunk['size'])
+ chunk_sum.update(chunk_data)
+
+ if chunk_sum.hexdigest() != chunk['md5']:
+ valid = False
+ break
+ if not valid:
+ invalid += 1
+ new_diff.redist.append(file)
+ continue
+ for file in diff.links:
+ file_path = os.path.join(self.path, file.path)
+ file_path = dl_utils.get_case_insensitive_name(file_path)
+ if not os.path.exists(file_path):
+ new_diff.links.append(file)
+
+ if not invalid:
+ self.logger.info("All files look good")
+ return
+
+ self.logger.info(f"Found {invalid} broken files, repairing...")
+ diff = new_diff
+
+ executor = ExecutingManager(self.api_handler, self.allowed_threads, self.path, self.support, diff, secure_links, self.game_id)
+ success = executor.setup()
+ if not success:
+        print('Unable to proceed, not enough disk space')
+ exit(2)
+ dl_utils.prepare_location(self.path)
+
+ for dir in self.manifest.dirs:
+ manifest_dir_path = os.path.join(self.path, dir.path)
+ dl_utils.prepare_location(dl_utils.get_case_insensitive_name(manifest_dir_path))
+ cancelled = executor.run()
+
+ if cancelled:
+ return
+
+ dl_utils.prepare_location(constants.MANIFESTS_DIR)
+ if self.manifest:
+ with open(manifest_path, 'w') as f_handle:
+ data = self.manifest.serialize_to_json()
+ f_handle.write(data)
+
+ def get_meta(self):
+ meta_url = self.build["link"]
+ self.meta, headers = dl_utils.get_zlib_encoded(self.api_handler, meta_url)
+ self.version_etag = headers.get("Etag")
+
+ # Append folder name when downloading
+ if self.should_append_folder_name:
+ self.path = os.path.join(self.path, self.meta["installDirectory"])
+
+ def get_dlcs_user_owns(self, info_command=False, requested_dlcs=None):
+ if requested_dlcs is None:
+ requested_dlcs = list()
+ if not self.dlcs_should_be_downloaded and not info_command:
+ return []
+ self.logger.debug("Getting dlcs user owns")
+ dlcs = []
+ if len(requested_dlcs) > 0:
+ for product in self.meta["products"]:
+ if (
+ product["productId"] != self.game_id
+ and product["productId"] in requested_dlcs
+ and self.api_handler.does_user_own(product["productId"])
+ ):
+ dlcs.append({"title": product["name"], "id": product["productId"]})
+ return dlcs
+ for product in self.meta["products"]:
+ if product["productId"] != self.game_id and self.api_handler.does_user_own(
+ product["productId"]
+ ):
+ dlcs.append({"title": product["name"], "id": product["productId"]})
+ return dlcs
diff --git a/app/src/main/python/gogdl/dl/objects/__init__.py b/app/src/main/python/gogdl/dl/objects/__init__.py
new file mode 100644
index 000000000..587f18fe5
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/objects/__init__.py
@@ -0,0 +1,2 @@
+# Data objects for GOG content system
+from . import v1, v2, generic
diff --git a/app/src/main/python/gogdl/dl/objects/generic.py b/app/src/main/python/gogdl/dl/objects/generic.py
new file mode 100644
index 000000000..a5ecd3344
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/objects/generic.py
@@ -0,0 +1,127 @@
+from dataclasses import dataclass
+from enum import Flag, auto
+from typing import Optional
+
+
+class BaseDiff:
+ def __init__(self):
+ self.deleted = []
+ self.new = []
+ self.changed = []
+ self.redist = []
+ self.removed_redist = []
+
+ self.links = [] # Unix only
+
+ def __str__(self):
+ return f"Deleted: {len(self.deleted)} New: {len(self.new)} Changed: {len(self.changed)}"
+
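+# Bit flags describing the filesystem operations a writer task should perform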
+class TaskFlag(Flag):
+ NONE = 0
+ SUPPORT = auto()
+ OPEN_FILE = auto()
+ CLOSE_FILE = auto()
+ CREATE_FILE = auto()
+ CREATE_SYMLINK = auto()
+ RENAME_FILE = auto()
+ COPY_FILE = auto()
+ DELETE_FILE = auto()
+ OFFLOAD_TO_CACHE = auto()
+ MAKE_EXE = auto()
+ PATCH = auto()
+ RELEASE_MEM = auto()
+ RELEASE_TEMP = auto()
+ ZIP_DEC = auto()
+
+@dataclass
+class MemorySegment:
+ offset: int
+ end: int
+
+ @property
+ def size(self):
+ return self.end - self.offset
+
+@dataclass
+class ChunkTask:
+ product: str
+ index: int
+
+ compressed_md5: str
+ md5: str
+ size: int
+ download_size: int
+
+ cleanup: bool = False
+ offload_to_cache: bool = False
+ old_offset: Optional[int] = None
+ old_flags: TaskFlag = TaskFlag.NONE
+ old_file: Optional[str] = None
+
+@dataclass
+class V1Task:
+ product: str
+ index: int
+ offset: int
+ size: int
+ md5: str
+ cleanup: Optional[bool] = True
+
+ old_offset: Optional[int] = None
+ offload_to_cache: Optional[bool] = False
+ old_flags: TaskFlag = TaskFlag.NONE
+ old_file: Optional[str] = None
+
+ # This isn't actual sum, but unique id of chunk we use to decide
+ # if we should push it to writer
+ @property
+ def compressed_md5(self):
+ return self.md5 + "_" + str(self.index)
+
+@dataclass
+class Task:
+ flag: TaskFlag
+ file_path: Optional[str] = None
+ file_index: Optional[int] = None
+
+ chunks: Optional[list[ChunkTask]] = None
+
+ target_path: Optional[str] = None
+ source_path: Optional[str] = None
+
+ old_file_index: Optional[int] = None
+
+ data: Optional[bytes] = None
+
+@dataclass
+class FileTask:
+ path: str
+ flags: TaskFlag
+
+ old_flags: TaskFlag = TaskFlag.NONE
+ old_file: Optional[str] = None
+
+ patch_file: Optional[str] = None
+
+@dataclass
+class FileInfo:
+ index: int
+ path: str
+ md5: str
+ size: int
+
+ def __eq__(self, other):
+ if not isinstance(other, FileInfo):
+ return False
+ return (self.path, self.md5, self.size) == (other.path, other.md5, other.size)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash((self.path, self.md5, self.size))
+
+
+@dataclass
+class TerminateWorker:
+ pass
diff --git a/app/src/main/python/gogdl/dl/objects/linux.py b/app/src/main/python/gogdl/dl/objects/linux.py
new file mode 100644
index 000000000..9cd9df2e9
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/objects/linux.py
@@ -0,0 +1,388 @@
+from io import BytesIO
+import stat
+
+
+END_OF_CENTRAL_DIRECTORY = b"\x50\x4b\x05\x06"
+CENTRAL_DIRECTORY = b"\x50\x4b\x01\x02"
+LOCAL_FILE_HEADER = b"\x50\x4b\x03\x04"
+
+# ZIP64
+ZIP_64_END_OF_CD_LOCATOR = b"\x50\x4b\x06\x07"
+ZIP_64_END_OF_CD = b"\x50\x4b\x06\x06"
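+
+# Standard little-endian ZIP record signatures ("PK\x05\x06", "PK\x01\x02", ...).
+# InstallerHandler scans HTTP range responses for them to locate the archive
+# structures without downloading the whole installer.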
+
+class LocalFile:
+ def __init__(self) -> None:
+ self.relative_local_file_offset: int
+ self.version_needed: bytes
+ self.general_purpose_bit_flag: bytes
+ self.compression_method: int
+ self.last_modification_time: bytes
+ self.last_modification_date: bytes
+ self.crc32: bytes
+ self.compressed_size: int
+ self.uncompressed_size: int
+ self.file_name_length: int
+ self.extra_field_length: int
+ self.file_name: str
+ self.extra_field: bytes
+ self.last_byte: int
+
+ def load_data(self, handler):
+ return handler.get_bytes_from_file(
+ from_b=self.last_byte + self.relative_local_file_offset,
+ size=self.compressed_size,
+ raw_response=True
+ )
+
+ @classmethod
+ def from_bytes(cls, data, offset, handler):
+ local_file = cls()
+ local_file.relative_local_file_offset = 0
+ local_file.version_needed = data[4:6]
+ local_file.general_purpose_bit_flag = data[6:8]
+ local_file.compression_method = int.from_bytes(data[8:10], "little")
+ local_file.last_modification_time = data[10:12]
+ local_file.last_modification_date = data[12:14]
+ local_file.crc32 = data[14:18]
+ local_file.compressed_size = int.from_bytes(data[18:22], "little")
+ local_file.uncompressed_size = int.from_bytes(data[22:26], "little")
+ local_file.file_name_length = int.from_bytes(data[26:28], "little")
+ local_file.extra_field_length = int.from_bytes(data[28:30], "little")
+
+ extra_data = handler.get_bytes_from_file(
+ from_b=30 + offset,
+ size=local_file.file_name_length + local_file.extra_field_length,
+ )
+
+ local_file.file_name = bytes(
+ extra_data[0: local_file.file_name_length]
+ ).decode()
+
+ local_file.extra_field = data[
+ local_file.file_name_length: local_file.file_name_length
+ + local_file.extra_field_length
+ ]
+ local_file.last_byte = (
+ local_file.file_name_length + local_file.extra_field_length + 30
+ )
+ return local_file
+
+ def __str__(self):
+ return f"\nCompressionMethod: {self.compression_method} \nFileNameLen: {self.file_name_length} \nFileName: {self.file_name} \nCompressedSize: {self.compressed_size} \nUncompressedSize: {self.uncompressed_size}"
+
+
+class CentralDirectoryFile:
+ def __init__(self, product):
+ self.product = product
+ self.version_made_by: bytes
+ self.version_needed_to_extract: bytes
+ self.general_purpose_bit_flag: bytes
+ self.compression_method: int
+ self.last_modification_time: bytes
+ self.last_modification_date: bytes
+ self.crc32: int
+ self.compressed_size: int
+ self.uncompressed_size: int
+ self.file_name_length: int
+ self.extra_field_length: int
+ self.file_comment_length: int
+ self.disk_number_start: bytes
+ self.int_file_attrs: bytes
+ self.ext_file_attrs: bytes
+ self.relative_local_file_offset: int
+ self.file_name: str
+ self.extra_field: BytesIO
+ self.comment: bytes
+ self.last_byte: int
+ self.file_data_offset: int
+
+ @classmethod
+ def from_bytes(cls, data, product):
+ cd_file = cls(product)
+
+ cd_file.version_made_by = data[4:6]
+ cd_file.version_needed_to_extract = data[6:8]
+ cd_file.general_purpose_bit_flag = data[8:10]
+ cd_file.compression_method = int.from_bytes(data[10:12], "little")
+ cd_file.last_modification_time = data[12:14]
+ cd_file.last_modification_date = data[14:16]
+ cd_file.crc32 = int.from_bytes(data[16:20], "little")
+ cd_file.compressed_size = int.from_bytes(data[20:24], "little")
+ cd_file.uncompressed_size = int.from_bytes(data[24:28], "little")
+ cd_file.file_name_length = int.from_bytes(data[28:30], "little")
+ cd_file.extra_field_length = int.from_bytes(data[30:32], "little")
+ cd_file.file_comment_length = int.from_bytes(data[32:34], "little")
+ cd_file.disk_number_start = data[34:36]
+ cd_file.int_file_attrs = data[36:38]
+ cd_file.ext_file_attrs = data[38:42]
+ cd_file.relative_local_file_offset = int.from_bytes(data[42:46], "little")
+ cd_file.file_data_offset = 0
+
+ extra_field_start = 46 + cd_file.file_name_length
+ cd_file.file_name = bytes(data[46:extra_field_start]).decode()
+
+ cd_file.extra_field = BytesIO(data[
+ extra_field_start: extra_field_start + cd_file.extra_field_length
+ ])
+
+ field = None
+ while True:
+ id = int.from_bytes(cd_file.extra_field.read(2), "little")
+ size = int.from_bytes(cd_file.extra_field.read(2), "little")
+
+ if id == 0x01:
+ if cd_file.extra_field_length - cd_file.extra_field.tell() >= size:
+ field = BytesIO(cd_file.extra_field.read(size))
+ break
+
+ cd_file.extra_field.seek(size, 1)
+
+ if cd_file.extra_field_length - cd_file.extra_field.tell() == 0:
+ break
+
+
+ if field:
+ if cd_file.uncompressed_size == 0xFFFFFFFF:
+ cd_file.uncompressed_size = int.from_bytes(field.read(8), "little")
+
+ if cd_file.compressed_size == 0xFFFFFFFF:
+ cd_file.compressed_size = int.from_bytes(field.read(8), "little")
+
+ if cd_file.relative_local_file_offset == 0xFFFFFFFF:
+ cd_file.relative_local_file_offset = int.from_bytes(field.read(8), "little")
+
+ comment_start = extra_field_start + cd_file.extra_field_length
+ cd_file.comment = data[
+ comment_start: comment_start + cd_file.file_comment_length
+ ]
+
+ cd_file.last_byte = comment_start + cd_file.file_comment_length
+
+ return cd_file, comment_start + cd_file.file_comment_length
+
+ def is_symlink(self):
+ return stat.S_ISLNK(int.from_bytes(self.ext_file_attrs, "little") >> 16)
+
+ def as_dict(self):
+ return {'file_name': self.file_name, 'crc32': self.crc32, 'compressed_size': self.compressed_size, 'size': self.uncompressed_size, 'is_symlink': self.is_symlink()}
+
+ def __str__(self):
+ return f"\nCompressionMethod: {self.compression_method} \nFileNameLen: {self.file_name_length} \nFileName: {self.file_name} \nStartDisk: {self.disk_number_start} \nCompressedSize: {self.compressed_size} \nUncompressedSize: {self.uncompressed_size}"
+
+ def __repr__(self):
+ return self.file_name
+
+
+class CentralDirectory:
+ def __init__(self, product):
+ self.files = []
+ self.product = product
+
+ @staticmethod
+ def create_central_dir_file(data, product):
+ return CentralDirectoryFile.from_bytes(data, product)
+
+ @classmethod
+ def from_bytes(cls, data, n, product):
+ central_dir = cls(product)
+ for record in range(n):
+ cd_file, next_offset = central_dir.create_central_dir_file(data, product)
+ central_dir.files.append(cd_file)
+ data = data[next_offset:]
+ if record == 0:
+ continue
+
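+ # Derive the previous entry's data offset from this entry's local header
+ # offset, assuming entries are laid out back to back in the archive.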
+ prev_i = record - 1
+ if not (prev_i >= 0 and prev_i < len(central_dir.files)):
+ continue
+ prev = central_dir.files[prev_i]
+ prev.file_data_offset = cd_file.relative_local_file_offset - prev.compressed_size
+
+ return central_dir
+
+class Zip64EndOfCentralDirLocator:
+ def __init__(self):
+ self.number_of_disk: int
+ self.zip64_end_of_cd_offset: int
+ self.total_number_of_disks: int
+
+ @classmethod
+ def from_bytes(cls, data):
+ zip64_end_of_cd = cls()
+ zip64_end_of_cd.number_of_disk = int.from_bytes(data[4:8], "little")
+ zip64_end_of_cd.zip64_end_of_cd_offset = int.from_bytes(data[8:16], "little")
+ zip64_end_of_cd.total_number_of_disks = int.from_bytes(data[16:20], "little")
+ return zip64_end_of_cd
+
+ def __str__(self):
+ return f"\nZIP64EOCDLocator\nDisk Number: {self.number_of_disk}\nZ64_EOCD Offset: {self.zip64_end_of_cd_offset}\nNumber of disks: {self.total_number_of_disks}"
+
+class Zip64EndOfCentralDir:
+ def __init__(self):
+ self.size: int
+ self.version_made_by: bytes
+ self.version_needed: bytes
+ self.number_of_disk: bytes
+ self.central_directory_start_disk: bytes
+ self.number_of_entries_on_this_disk: int
+ self.number_of_entries_total: int
+ self.size_of_central_directory: int
+ self.central_directory_offset: int
+ self.extensible_data = None
+
+ @classmethod
+ def from_bytes(cls, data):
+ end_of_cd = cls()
+
+ end_of_cd.size = int.from_bytes(data[4:12], "little")
+ end_of_cd.version_made_by = data[12:14]
+ end_of_cd.version_needed = data[14:16]
+ end_of_cd.number_of_disk = data[16:20]
+ end_of_cd.central_directory_start_disk = data[20:24]
+ end_of_cd.number_of_entries_on_this_disk = int.from_bytes(data[24:32], "little")
+ end_of_cd.number_of_entries_total = int.from_bytes(data[32:40], "little")
+ end_of_cd.size_of_central_directory = int.from_bytes(data[40:48], "little")
+ end_of_cd.central_directory_offset = int.from_bytes(data[48:56], "little")
+
+ return end_of_cd
+
+ def __str__(self) -> str:
+ return f"\nZ64 EndOfCD\nSize: {self.size}\nNumber of disk: {self.number_of_disk}\nEntries on this disk: {self.number_of_entries_on_this_disk}\nEntries total: {self.number_of_entries_total}\nCD offset: {self.central_directory_offset}"
+
+
+class EndOfCentralDir:
+ def __init__(self):
+ self.number_of_disk: bytes
+ self.central_directory_disk: bytes
+ self.central_directory_records: int
+ self.size_of_central_directory: int
+ self.central_directory_offset: int
+ self.comment_length: bytes
+ self.comment: bytes
+
+ @classmethod
+ def from_bytes(cls, data):
+ central_dir = cls()
+ central_dir.number_of_disk = data[4:6]
+ central_dir.central_directory_disk = data[6:8]
+ central_dir.central_directory_records = int.from_bytes(data[8:10], "little")
+ central_dir.size_of_central_directory = int.from_bytes(data[12:16], "little")
+ central_dir.central_directory_offset = int.from_bytes(data[16:20], "little")
+ central_dir.comment_length = data[20:22]
+ central_dir.comment = data[
+ 22: 22 + int.from_bytes(central_dir.comment_length, "little")
+ ]
+
+ return central_dir
+
+ def __str__(self):
+ return f"\nDiskNumber: {self.number_of_disk} \nCentralDirRecords: {self.central_directory_records} \nCentralDirSize: {self.size_of_central_directory} \nCentralDirOffset: {self.central_directory_offset}"
+
+
+class InstallerHandler:
+ def __init__(self, url, product_id, session):
+ self.url = url
+ self.product = product_id
+ self.session = session
+ self.file_size = 0
+
+ SEARCH_OFFSET = 0
+ SEARCH_RANGE = 2 * 1024 * 1024 # 2 MiB
+
+ beginning_of_file = self.get_bytes_from_file(
+ from_b=SEARCH_OFFSET, size=SEARCH_RANGE, add_archive_index=False
+ )
+
+ self.start_of_archive_index = beginning_of_file.find(LOCAL_FILE_HEADER) + SEARCH_OFFSET
+
+ # ZIP contents
+ self.central_directory_offset: int
+ self.central_directory_records: int
+ self.size_of_central_directory: int
+ self.central_directory: CentralDirectory
+
+ def get_bytes_from_file(self, from_b=-1, size=None, add_archive_index=True, raw_response=False):
+ if add_archive_index:
+ from_b += self.start_of_archive_index
+
+ from_b_repr = str(from_b) if from_b > -1 else ""
+ if size:
+ end_b = from_b + size - 1
+ else:
+ end_b = ""
+ range_header = self.get_range_header(from_b_repr, end_b)
+
+ response = self.session.get(self.url, headers={'Range': range_header},
+ allow_redirects=False, stream=raw_response)
+ if response.status_code == 302:
+ # Cache the redirect target so subsequent requests skip the content-system API
+ self.url = response.headers.get('Location') or self.url
+ return self.get_bytes_from_file(from_b, size, add_archive_index, raw_response)
+ if not self.file_size:
+ self.file_size = int(response.headers.get("Content-Range").split("/")[-1])
+ if raw_response:
+ return response
+ else:
+ data = response.content
+ return data
+
+ @staticmethod
+ def get_range_header(from_b="", to_b=""):
+ return f"bytes={from_b}-{to_b}"
+
+ def setup(self):
+ self.__find_end_of_cd()
+ self.__find_central_directory()
+
+ def __find_end_of_cd(self):
+ end_of_cd_data = self.get_bytes_from_file(
+ from_b=self.file_size - 100, add_archive_index=False
+ )
+
+ end_of_cd_header_data_index = end_of_cd_data.find(END_OF_CENTRAL_DIRECTORY)
+ zip64_end_of_cd_locator_index = end_of_cd_data.find(ZIP_64_END_OF_CD_LOCATOR)
+ assert end_of_cd_header_data_index != -1
+ end_of_cd = EndOfCentralDir.from_bytes(end_of_cd_data[end_of_cd_header_data_index:])
+ if end_of_cd.central_directory_offset == 0xFFFFFFFF:
+ assert zip64_end_of_cd_locator_index != -1
+ # We need to find zip64 headers
+
+ zip64_end_of_cd_locator = Zip64EndOfCentralDirLocator.from_bytes(end_of_cd_data[zip64_end_of_cd_locator_index:])
+ zip64_end_of_cd_data = self.get_bytes_from_file(from_b=zip64_end_of_cd_locator.zip64_end_of_cd_offset, size=200)
+ zip64_end_of_cd = Zip64EndOfCentralDir.from_bytes(zip64_end_of_cd_data)
+
+ self.central_directory_offset = zip64_end_of_cd.central_directory_offset
+ self.size_of_central_directory = zip64_end_of_cd.size_of_central_directory
+ self.central_directory_records = zip64_end_of_cd.number_of_entries_total
+ else:
+ self.central_directory_offset = end_of_cd.central_directory_offset
+ self.size_of_central_directory = end_of_cd.size_of_central_directory
+ self.central_directory_records = end_of_cd.central_directory_records
+
+ def __find_central_directory(self):
+ central_directory_data = self.get_bytes_from_file(
+ from_b=self.central_directory_offset,
+ size=self.size_of_central_directory,
+ )
+
+ assert central_directory_data[:4] == CENTRAL_DIRECTORY
+
+ self.central_directory = CentralDirectory.from_bytes(
+ central_directory_data, self.central_directory_records, self.product
+ )
+ last_entry = self.central_directory.files[-1]
+ last_entry.file_data_offset = self.central_directory_offset - last_entry.compressed_size
+
+
+class LinuxFile:
+ def __init__(self, product, path, compression, start, compressed_size, size, checksum, executable):
+ self.product = product
+ self.path = path
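+ # ZIP compression method 8 is deflate, so this flag marks deflated entries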
+ self.compression = compression == 8
+ self.offset = start
+ self.compressed_size = compressed_size
+ self.size = size
+ self.hash = str(checksum)
+ self.flags = []
+ if executable:
+ self.flags.append("executable")
diff --git a/app/src/main/python/gogdl/dl/objects/v1.py b/app/src/main/python/gogdl/dl/objects/v1.py
new file mode 100644
index 000000000..41f279b9f
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/objects/v1.py
@@ -0,0 +1,168 @@
+import json
+import os
+from gogdl.dl import dl_utils
+from gogdl.dl.objects import generic, v2
+from gogdl import constants
+from gogdl.languages import Language
+
+
+class Depot:
+ def __init__(self, target_lang, depot_data):
+ self.target_lang = target_lang
+ self.languages = depot_data["languages"]
+ self.game_ids = depot_data["gameIDs"]
+ self.size = int(depot_data["size"])
+ self.manifest = depot_data["manifest"]
+
+ def check_language(self):
+ status = True
+ for lang in self.languages:
+ status = lang == "Neutral" or lang == self.target_lang
+ if status:
+ break
+ return status
+
+class Directory:
+ def __init__(self, item_data):
+ self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).lstrip(os.sep)
+
+class Dependency:
+ def __init__(self, data):
+ self.id = data["redist"]
+ self.size = data.get("size")
+ self.target_dir = data.get("targetDir")
+
+
+class File:
+ def __init__(self, data, product_id):
+ self.offset = data.get("offset")
+ self.hash = data.get("hash")
+ self.url = data.get("url")
+ self.path = data["path"].lstrip("/")
+ self.size = data["size"]
+ self.flags = []
+ if data.get("support"):
+ self.flags.append("support")
+ if data.get("executable"):
+ self.flags.append("executble")
+
+ self.product_id = product_id
+
+class Manifest:
+ def __init__(self, platform, meta, language, dlcs, api_handler, dlc_only):
+ self.platform = platform
+ self.data = meta
+ self.data['HGLPlatform'] = platform
+ self.data["HGLInstallLanguage"] = language.code
+ self.data["HGLdlcs"] = dlcs
+ self.product_id = meta["product"]["rootGameID"]
+ self.dlcs = dlcs
+ self.dlc_only = dlc_only
+ self.all_depots = []
+ self.depots = self.parse_depots(language, meta["product"]["depots"])
+ self.dependencies = [Dependency(depot) for depot in meta["product"]["depots"] if depot.get('redist')]
+ self.dependencies_ids = [depot['redist'] for depot in meta["product"]["depots"] if depot.get('redist')]
+
+ self.api_handler = api_handler
+
+ self.files = []
+ self.dirs = []
+
+ @classmethod
+ def from_json(cls, meta, api_handler):
+ manifest = cls(meta['HGLPlatform'], meta, Language.parse(meta['HGLInstallLanguage']), meta["HGLdlcs"], api_handler, False)
+ return manifest
+
+ def serialize_to_json(self):
+ return json.dumps(self.data)
+
+ def parse_depots(self, language, depots):
+ parsed = []
+ dlc_ids = [dlc["id"] for dlc in self.dlcs]
+ for depot in depots:
+ if depot.get("redist"):
+ continue
+
+ for g_id in depot["gameIDs"]:
+ if g_id in dlc_ids or (not self.dlc_only and self.product_id == g_id):
+ new_depot = Depot(language, depot)
+ parsed.append(new_depot)
+ self.all_depots.append(new_depot)
+ break
+ return list(filter(lambda x: x.check_language(), parsed))
+
+ def list_languages(self):
+ languages_dict = set()
+ for depot in self.all_depots:
+ for language in depot.languages:
+ if language != "Neutral":
+ languages_dict.add(Language.parse(language).code)
+
+ return list(languages_dict)
+
+ def calculate_download_size(self):
+ data = dict()
+
+ for depot in self.all_depots:
+ for product_id in depot.game_ids:
+ if not product_id in data:
+ data[product_id] = dict()
+ product_data = data[product_id]
+ for lang in depot.languages:
+ if lang == "Neutral":
+ lang = "*"
+ if not lang in product_data:
+ product_data[lang] = {"download_size": 0, "disk_size": 0}
+
+ product_data[lang]["download_size"] += depot.size
+ product_data[lang]["disk_size"] += depot.size
+
+ return data
+
+
+ def get_files(self):
+ for depot in self.depots:
+ manifest = dl_utils.get_json(self.api_handler, f"{constants.GOG_CDN}/content-system/v1/manifests/{depot.game_ids[0]}/{self.platform}/{self.data['product']['timestamp']}/{depot.manifest}")
+ for record in manifest["depot"]["files"]:
+ if "directory" in record:
+ self.dirs.append(Directory(record))
+ else:
+ self.files.append(File(record, depot.game_ids[0]))
+
+class ManifestDiff(generic.BaseDiff):
+ def __init__(self):
+ super().__init__()
+
+ @classmethod
+ def compare(cls, new_manifest, old_manifest=None):
+ comparison = cls()
+
+ if not old_manifest:
+ comparison.new = new_manifest.files
+ return comparison
+
+ new_files = dict()
+ for file in new_manifest.files:
+ new_files.update({file.path.lower(): file})
+
+ old_files = dict()
+ for file in old_manifest.files:
+ old_files.update({file.path.lower(): file})
+
+ for old_file in old_files.values():
+ if not new_files.get(old_file.path.lower()):
+ comparison.deleted.append(old_file)
+
+ if type(old_manifest) == v2.Manifest:
+ comparison.new = new_manifest.files
+ return comparison
+
+ for new_file in new_files.values():
+ old_file = old_files.get(new_file.path.lower())
+ if not old_file:
+ comparison.new.append(new_file)
+ else:
+ if new_file.hash != old_file.hash:
+ comparison.changed.append(new_file)
+
+ return comparison
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/dl/objects/v2.py b/app/src/main/python/gogdl/dl/objects/v2.py
new file mode 100644
index 000000000..102a71a1c
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/objects/v2.py
@@ -0,0 +1,295 @@
+import json
+import os
+
+from gogdl.dl import dl_utils
+from gogdl.dl.objects import generic, v1
+from gogdl import constants
+from gogdl.languages import Language
+
+
+class DepotFile:
+ def __init__(self, item_data, product_id):
+ self.flags = item_data.get("flags") or list()
+ self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).lstrip(os.sep)
+ if "support" in self.flags:
+ self.path = os.path.join(product_id, self.path)
+ self.chunks = item_data["chunks"]
+ self.md5 = item_data.get("md5")
+ self.sha256 = item_data.get("sha256")
+ self.product_id = product_id
+
+
+# Present in some depots; indicates a directory to be created and contains only a path
+class DepotDirectory:
+ def __init__(self, item_data):
+ self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).rstrip(os.sep)
+
+class DepotLink:
+ def __init__(self, item_data):
+ self.path = item_data["path"]
+ self.target = item_data["target"]
+
+
+class Depot:
+ def __init__(self, target_lang, depot_data):
+ self.target_lang = target_lang
+ self.languages = depot_data["languages"]
+ self.bitness = depot_data.get("osBitness")
+ self.product_id = depot_data["productId"]
+ self.compressed_size = depot_data.get("compressedSize") or 0
+ self.size = depot_data.get("size") or 0
+ self.manifest = depot_data["manifest"]
+
+ def check_language(self):
+ status = False
+ for lang in self.languages:
+ status = (
+ lang == "*"
+ or self.target_lang == lang
+ )
+ if status:
+ break
+ return status
+
+class Manifest:
+ def __init__(self, meta, language, dlcs, api_handler, dlc_only):
+ self.data = meta
+ self.data["HGLInstallLanguage"] = language.code
+ self.data["HGLdlcs"] = dlcs
+ self.product_id = meta["baseProductId"]
+ self.dlcs = dlcs
+ self.dlc_only = dlc_only
+ self.all_depots = []
+ self.depots = self.parse_depots(language, meta["depots"])
+ self.dependencies_ids = meta.get("dependencies")
+ if not self.dependencies_ids:
+ self.dependencies_ids = list()
+ self.install_directory = meta["installDirectory"]
+
+ self.api_handler = api_handler
+
+ self.files = []
+ self.dirs = []
+
+ @classmethod
+ def from_json(cls, meta, api_handler):
+ manifest = cls(meta, Language.parse(meta["HGLInstallLanguage"]), meta["HGLdlcs"], api_handler, False)
+ return manifest
+
+ def serialize_to_json(self):
+ return json.dumps(self.data)
+
+ def parse_depots(self, language, depots):
+ parsed = []
+ dlc_ids = [dlc["id"] for dlc in self.dlcs]
+ for depot in depots:
+ if depot["productId"] in dlc_ids or (
+ not self.dlc_only and self.product_id == depot["productId"]
+ ):
+ new_depot = Depot(language, depot)
+ parsed.append(new_depot)
+ self.all_depots.append(new_depot)
+
+
+ return list(filter(lambda x: x.check_language(), parsed))
+
+ def list_languages(self):
+ languages_dict = set()
+ for depot in self.all_depots:
+ for language in depot.languages:
+ if language != "*":
+ languages_dict.add(Language.parse(language).code)
+
+ return list(languages_dict)
+
+ def calculate_download_size(self):
+ data = dict()
+
+ for depot in self.all_depots:
+ if not depot.product_id in data:
+ data[depot.product_id] = dict()
+ data[depot.product_id]['*'] = {"download_size": 0, "disk_size": 0}
+ product_data = data[depot.product_id]
+ for lang in depot.languages:
+ if not lang in product_data:
+ product_data[lang] = {"download_size":0, "disk_size":0}
+
+ product_data[lang]["download_size"] += depot.compressed_size
+ product_data[lang]["disk_size"] += depot.size
+
+ return data
+
+ def get_files(self):
+ for depot in self.depots:
+ manifest = dl_utils.get_zlib_encoded(
+ self.api_handler,
+ f"{constants.GOG_CDN}/content-system/v2/meta/{dl_utils.galaxy_path(depot.manifest)}",
+ )[0]
+ for item in manifest["depot"]["items"]:
+ if item["type"] == "DepotFile":
+ self.files.append(DepotFile(item, depot.product_id))
+ elif item["type"] == "DepotLink":
+ self.files.append(DepotLink(item))
+ else:
+ self.dirs.append(DepotDirectory(item))
+
+class FileDiff:
+ def __init__(self):
+ self.file: DepotFile
+ self.old_file_flags: list[str]
+ self.disk_size_diff: int = 0
+
+ @classmethod
+ def compare(cls, new: DepotFile, old: DepotFile):
+ diff = cls()
+ diff.disk_size_diff = sum([ch['size'] for ch in new.chunks])
+ diff.disk_size_diff -= sum([ch['size'] for ch in old.chunks])
+ diff.old_file_flags = old.flags
+ for new_chunk in new.chunks:
+ old_offset = 0
+ for old_chunk in old.chunks:
+ if old_chunk["md5"] == new_chunk["md5"]:
+ new_chunk["old_offset"] = old_offset
+ old_offset += old_chunk["size"]
+ diff.file = new
+ return diff
+
+# Using xdelta patching
+class FilePatchDiff:
+ def __init__(self, data):
+ self.md5_source = data['md5_source']
+ self.md5_target = data['md5_target']
+ self.source = data['path_source'].replace('\\', '/')
+ self.target = data['path_target'].replace('\\', '/')
+ self.md5 = data['md5']
+ self.chunks = data['chunks']
+
+ self.old_file: DepotFile
+ self.new_file: DepotFile
+
+class ManifestDiff(generic.BaseDiff):
+ def __init__(self):
+ super().__init__()
+
+ @classmethod
+ def compare(cls, manifest, old_manifest=None, patch=None):
+ comparison = cls()
+ is_manifest_upgrade = isinstance(old_manifest, v1.Manifest)
+
+ if not old_manifest:
+ comparison.new = manifest.files
+ return comparison
+
+ new_files = dict()
+ for file in manifest.files:
+ new_files.update({file.path.lower(): file})
+
+ old_files = dict()
+ for file in old_manifest.files:
+ old_files.update({file.path.lower(): file})
+
+ for old_file in old_files.values():
+ if not new_files.get(old_file.path.lower()):
+ comparison.deleted.append(old_file)
+
+ for new_file in new_files.values():
+ old_file = old_files.get(new_file.path.lower())
+ if isinstance(new_file, DepotLink):
+ comparison.links.append(new_file)
+ continue
+ if not old_file:
+ comparison.new.append(new_file)
+ else:
+ if is_manifest_upgrade:
+ if len(new_file.chunks) == 0:
+ continue
+ new_final_sum = new_file.md5 or new_file.chunks[0]["md5"]
+ if new_final_sum:
+ if old_file.hash != new_final_sum:
+ comparison.changed.append(new_file)
+ continue
+
+ patch_file = None
+ if patch and len(old_file.chunks):
+ for p_file in patch.files:
+ old_final_sum = old_file.md5 or old_file.chunks[0]["md5"]
+ if p_file.md5_source == old_final_sum:
+ patch_file = p_file
+ patch_file.old_file = old_file
+ patch_file.new_file = new_file
+
+ if patch_file:
+ comparison.changed.append(patch_file)
+ continue
+
+ if len(new_file.chunks) == 1 and len(old_file.chunks) == 1:
+ if new_file.chunks[0]["md5"] != old_file.chunks[0]["md5"]:
+ comparison.changed.append(new_file)
+ else:
+ if (new_file.md5 and old_file.md5 and new_file.md5 != old_file.md5) or (new_file.sha256 and old_file.sha256 and old_file.sha256 != new_file.sha256):
+ comparison.changed.append(FileDiff.compare(new_file, old_file))
+ elif len(new_file.chunks) != len(old_file.chunks):
+ comparison.changed.append(FileDiff.compare(new_file, old_file))
+ return comparison
+
+class Patch:
+ def __init__(self):
+ self.patch_data = {}
+ self.files = []
+
+ @classmethod
+ def get(cls, manifest, old_manifest, lang: str, dlcs: list, api_handler):
+ if isinstance(manifest, v1.Manifest) or isinstance(old_manifest, v1.Manifest):
+ return None
+ from_build = old_manifest.data.get('buildId')
+ to_build = manifest.data.get('buildId')
+ if not from_build or not to_build:
+ return None
+ dlc_ids = [dlc["id"] for dlc in dlcs]
+ patch_meta = dl_utils.get_zlib_encoded(api_handler, f'{constants.GOG_CONTENT_SYSTEM}/products/{manifest.product_id}/patches?_version=4&from_build_id={from_build}&to_build_id={to_build}')[0]
+ if not patch_meta or patch_meta.get('error'):
+ return None
+ patch_data = dl_utils.get_zlib_encoded(api_handler, patch_meta['link'])[0]
+ if not patch_data:
+ return None
+
+ if patch_data['algorithm'] != 'xdelta3':
+ print("Unsupported patch algorithm")
+ return None
+
+ depots = []
+ # Get depots we need
+ for depot in patch_data['depots']:
+ if depot['productId'] == patch_data['baseProductId'] or depot['productId'] in dlc_ids:
+ if lang in depot['languages']:
+ depots.append(depot)
+
+ if not depots:
+ return None
+
+ files = []
+ fail = False
+ for depot in depots:
+ depotdiffs = dl_utils.get_zlib_encoded(api_handler, f'{constants.GOG_CDN}/content-system/v2/patches/meta/{dl_utils.galaxy_path(depot["manifest"])}')[0]
+ if not depotdiffs:
+ fail = True
+ break
+ for diff in depotdiffs['depot']['items']:
+ if diff['type'] == 'DepotDiff':
+ files.append(FilePatchDiff(diff))
+ else:
+ print('Unknown type in patcher', diff['type'])
+ return None
+
+ if fail:
+ # TODO: Handle this better
+ # Maybe exception?
+ print("Failed to get patch manifests")
+ return None
+
+ patch = cls()
+ patch.patch_data = patch_data
+ patch.files = files
+
+ return patch
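+
+# Patch.get() returns a populated Patch only when an xdelta3 patch path exists
+# between the two builds; it returns None whenever patching doesn't apply
+# (v1 manifests, missing build ids, an unsupported algorithm, or no matching depots).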
diff --git a/app/src/main/python/gogdl/dl/progressbar.py b/app/src/main/python/gogdl/dl/progressbar.py
new file mode 100644
index 000000000..6cd0470e7
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/progressbar.py
@@ -0,0 +1,125 @@
+import queue
+from multiprocessing import Queue
+import threading
+import logging
+from time import sleep, time
+
+
+class ProgressBar(threading.Thread):
+ def __init__(self, max_val: int, speed_queue: Queue, write_queue: Queue, game_id=None):
+ self.logger = logging.getLogger("PROGRESS")
+ self.downloaded = 0
+ self.total = max_val
+ self.speed_queue = speed_queue
+ self.write_queue = write_queue
+ self.started_at = time()
+ self.last_update = time()
+ self.completed = False
+ self.game_id = game_id
+
+ self.decompressed = 0
+
+ self.downloaded_since_last_update = 0
+ self.decompressed_since_last_update = 0
+ self.written_since_last_update = 0
+ self.read_since_last_update = 0
+
+ self.written_total = 0
+
+ super().__init__(target=self.loop)
+
+ def loop(self):
+ while not self.completed:
+ # Check for cancellation signal
+ if self.game_id:
+ try:
+ import builtins
+ flag_name = f'GOGDL_CANCEL_{self.game_id}'
+ if hasattr(builtins, flag_name) and getattr(builtins, flag_name, False):
+ self.logger.info(f"Progress reporting cancelled for game {self.game_id}")
+ self.completed = True
+ break
+ except:
+ pass
+
+ self.print_progressbar()
+ self.downloaded_since_last_update = self.decompressed_since_last_update = 0
+ self.written_since_last_update = self.read_since_last_update = 0
+ timestamp = time()
+ while not self.completed and (time() - timestamp) < 1:
+ try:
+ dl, dec = self.speed_queue.get(timeout=1)
+ self.downloaded_since_last_update += dl
+ self.decompressed_since_last_update += dec
+ except queue.Empty:
+ pass
+ try:
+ wr, r = self.write_queue.get(timeout=1)
+ self.written_since_last_update += wr
+ self.read_since_last_update += r
+ except queue.Empty:
+ pass
+
+ self.print_progressbar()
+
+ def print_progressbar(self):
+ percentage = (self.written_total / self.total) * 100
+ running_time = time() - self.started_at
+ runtime_h = int(running_time // 3600)
+ runtime_m = int((running_time % 3600) // 60)
+ runtime_s = int((running_time % 3600) % 60)
+
+ print_time_delta = time() - self.last_update
+
+ current_dl_speed = 0
+ current_decompress = 0
+ if print_time_delta:
+ current_dl_speed = self.downloaded_since_last_update / print_time_delta
+ current_decompress = self.decompressed_since_last_update / print_time_delta
+ current_w_speed = self.written_since_last_update / print_time_delta
+ current_r_speed = self.read_since_last_update / print_time_delta
+ else:
+ current_w_speed = 0
+ current_r_speed = 0
+
+ if percentage > 0:
+ estimated_time = (100 * running_time) / percentage - running_time
+ else:
+ estimated_time = 0
+ estimated_time = max(estimated_time, 0) # Never report a negative ETA
+
+ estimated_h = int(estimated_time // 3600)
+ estimated_time = estimated_time % 3600
+ estimated_m = int(estimated_time // 60)
+ estimated_s = int(estimated_time % 60)
+
+ self.logger.info(
+ f"= Progress: {percentage:.02f} {self.written_total}/{self.total}, "
+ + f"Running for: {runtime_h:02d}:{runtime_m:02d}:{runtime_s:02d}, "
+ + f"ETA: {estimated_h:02d}:{estimated_m:02d}:{estimated_s:02d}"
+ )
+
+ self.logger.info(
+ f"= Downloaded: {self.downloaded / 1024 / 1024:.02f} MiB, "
+ f"Written: {self.written_total / 1024 / 1024:.02f} MiB"
+ )
+
+ self.logger.info(
+ f" + Download\t- {current_dl_speed / 1024 / 1024:.02f} MiB/s (raw) "
+ f"/ {current_decompress / 1024 / 1024:.02f} MiB/s (decompressed)"
+ )
+
+ self.logger.info(
+ f" + Disk\t- {current_w_speed / 1024 / 1024:.02f} MiB/s (write) / "
+ f"{current_r_speed / 1024 / 1024:.02f} MiB/s (read)"
+ )
+
+ self.last_update = time()
+
+ def update_downloaded_size(self, addition):
+ self.downloaded += addition
+
+ def update_decompressed_size(self, addition):
+ self.decompressed += addition
+
+ def update_bytes_written(self, addition):
+ self.written_total += addition
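+
+# Rough usage sketch (an assumption about how the download manager, not shown
+# here, is expected to drive this thread):
+#   bar = ProgressBar(total_size, speed_queue, write_queue, game_id)
+#   bar.start()
+#   ... workers feed the queues; the manager calls the update_* methods ...
+#   bar.completed = True
+#   bar.join()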
diff --git a/app/src/main/python/gogdl/dl/workers/task_executor.py b/app/src/main/python/gogdl/dl/workers/task_executor.py
new file mode 100644
index 000000000..f105c482e
--- /dev/null
+++ b/app/src/main/python/gogdl/dl/workers/task_executor.py
@@ -0,0 +1,366 @@
+import os
+import shutil
+import sys
+import stat
+import traceback
+import time
+import requests
+import zlib
+import hashlib
+from io import BytesIO
+from typing import Optional, Union
+from copy import copy, deepcopy
+from gogdl.dl import dl_utils
+from dataclasses import dataclass
+from enum import Enum, auto
+from gogdl.dl.objects.generic import TaskFlag, TerminateWorker
+from gogdl.xdelta import patcher
+
+
+class FailReason(Enum):
+ UNKNOWN = 0
+ CHECKSUM = auto()
+ CONNECTION = auto()
+ UNAUTHORIZED = auto()
+ MISSING_CHUNK = auto()
+
+
+@dataclass
+class DownloadTask:
+ product_id: str
+
+@dataclass
+class DownloadTask1(DownloadTask):
+ offset: int
+ size: int
+ compressed_sum: str
+ temp_file: str # Use temp file instead of memory segment
+
+@dataclass
+class DownloadTask2(DownloadTask):
+ compressed_sum: str
+ temp_file: str # Use temp file instead of memory segment
+
+
+@dataclass
+class WriterTask:
+ destination: str
+ file_path: str
+ flags: TaskFlag
+
+ hash: Optional[str] = None
+ size: Optional[int] = None
+ temp_file: Optional[str] = None # Use temp file instead of shared memory
+ old_destination: Optional[str] = None
+ old_file: Optional[str] = None
+ old_offset: Optional[int] = None
+ patch_file: Optional[str] = None
+
+@dataclass
+class DownloadTaskResult:
+ success: bool
+ fail_reason: Optional[FailReason]
+ task: Union[DownloadTask2, DownloadTask1]
+ temp_file: Optional[str] = None
+ download_size: Optional[int] = None
+ decompressed_size: Optional[int] = None
+
+@dataclass
+class WriterTaskResult:
+ success: bool
+ task: Union[WriterTask, TerminateWorker]
+ written: int = 0
+
+
+def download_worker(download_queue, results_queue, speed_queue, secure_links, temp_dir, game_id):
+ """Download worker function that runs in a thread"""
+ session = requests.session()
+
+ while True:
+ # Check for cancellation signal before processing next task
+ try:
+ import builtins
+ flag_name = f'GOGDL_CANCEL_{game_id}'
+ if hasattr(builtins, flag_name) and getattr(builtins, flag_name, False):
+ session.close()
+ return # Exit worker thread if cancelled
+ except:
+ pass # Continue if cancellation check fails
+
+ try:
+ task: Union[DownloadTask1, DownloadTask2, TerminateWorker] = download_queue.get(timeout=1)
+ except:
+ continue
+
+ if isinstance(task, TerminateWorker):
+ break
+
+ if type(task) == DownloadTask2:
+ download_v2_chunk(task, session, secure_links, results_queue, speed_queue, game_id)
+ elif type(task) == DownloadTask1:
+ download_v1_chunk(task, session, secure_links, results_queue, speed_queue, game_id)
+
+ session.close()
+
+
+def download_v2_chunk(task: DownloadTask2, session, secure_links, results_queue, speed_queue, game_id):
+ retries = 5
+ urls = secure_links[task.product_id]
+ compressed_md5 = task.compressed_sum
+
+ endpoint = deepcopy(urls[0]) # Use deepcopy for thread safety
+ if task.product_id != 'redist':
+ endpoint["parameters"]["path"] += f"/{dl_utils.galaxy_path(compressed_md5)}"
+ url = dl_utils.merge_url_with_params(
+ endpoint["url_format"], endpoint["parameters"]
+ )
+ else:
+ endpoint["url"] += "/" + dl_utils.galaxy_path(compressed_md5)
+ url = endpoint["url"]
+
+ buffer = bytes()
+ compressed_sum = hashlib.md5()
+ download_size = 0
+ response = None
+
+ while retries > 0:
+ buffer = bytes()
+ compressed_sum = hashlib.md5()
+ download_size = 0
+ decompressor = zlib.decompressobj()
+
+ try:
+ response = session.get(url, stream=True, timeout=10)
+ response.raise_for_status()
+ for chunk in response.iter_content(1024 * 512):
+ # Check for cancellation during download
+ try:
+ import builtins
+ flag_name = f'GOGDL_CANCEL_{game_id}'
+ if hasattr(builtins, flag_name) and getattr(builtins, flag_name, False):
+ return # Exit immediately if cancelled
+ except:
+ pass
+
+ download_size += len(chunk)
+ compressed_sum.update(chunk)
+ decompressed = decompressor.decompress(chunk)
+ buffer += decompressed
+ speed_queue.put((len(chunk), len(decompressed)))
+
+ except Exception as e:
+ print("Connection failed", e)
+ if response and response.status_code == 401:
+ results_queue.put(DownloadTaskResult(False, FailReason.UNAUTHORIZED, task))
+ return
+ retries -= 1
+ time.sleep(2)
+ continue
+ break
+ else:
+ results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task))
+ return
+
+ decompressed_size = len(buffer)
+
+ # Write to temp file instead of shared memory
+ try:
+ with open(task.temp_file, 'wb') as f:
+ f.write(buffer)
+ except Exception as e:
+ print("ERROR writing temp file", e)
+ results_queue.put(DownloadTaskResult(False, FailReason.UNKNOWN, task))
+ return
+
+ if compressed_sum.hexdigest() != compressed_md5:
+ results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task))
+ return
+
+ results_queue.put(DownloadTaskResult(True, None, task, temp_file=task.temp_file, download_size=download_size, decompressed_size=decompressed_size))
+
+
+def download_v1_chunk(task: DownloadTask1, session, secure_links, results_queue, speed_queue, game_id):
+ retries = 5
+ urls = secure_links[task.product_id]
+
+ response = None
+ if type(urls) == str:
+ url = urls
+ else:
+ endpoint = deepcopy(urls[0])
+ endpoint["parameters"]["path"] += "/main.bin"
+ url = dl_utils.merge_url_with_params(
+ endpoint["url_format"], endpoint["parameters"]
+ )
+ range_header = dl_utils.get_range_header(task.offset, task.size)
+
+ # Stream directly to temp file for V1 to avoid memory issues with large files
+ download_size = 0
+ while retries > 0:
+ download_size = 0
+ try:
+ response = session.get(url, stream=True, timeout=10, headers={'Range': range_header})
+ response.raise_for_status()
+
+ # Stream directly to temp file instead of loading into memory
+ with open(task.temp_file, 'wb') as temp_f:
+ for chunk in response.iter_content(1024 * 512): # 512KB chunks
+ # Check for cancellation during download
+ try:
+ import builtins
+ flag_name = f'GOGDL_CANCEL_{game_id}'
+ if hasattr(builtins, flag_name) and getattr(builtins, flag_name, False):
+ return # Exit immediately if cancelled
+ except:
+ pass
+
+ temp_f.write(chunk)
+ download_size += len(chunk)
+ speed_queue.put((len(chunk), len(chunk)))
+
+ except Exception as e:
+ print("Connection failed", e)
+ if response and response.status_code == 401:
+ results_queue.put(DownloadTaskResult(False, FailReason.UNAUTHORIZED, task))
+ return
+ retries -= 1
+ time.sleep(2)
+ continue
+ break
+ else:
+ results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task))
+ return
+
+ # Verify file size
+ if download_size != task.size:
+ results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task))
+ return
+
+ results_queue.put(DownloadTaskResult(True, None, task, temp_file=task.temp_file, download_size=download_size, decompressed_size=download_size))
+
+
+def writer_worker(writer_queue, results_queue, speed_queue, cache, temp_dir):
+ """Writer worker function that runs in a thread"""
+ file_handle = None
+ current_file = ''
+
+ while True:
+ try:
+ task: Union[WriterTask, TerminateWorker] = writer_queue.get(timeout=2)
+ except:
+ continue
+
+ if isinstance(task, TerminateWorker):
+ results_queue.put(WriterTaskResult(True, task))
+ break
+
+ written = 0
+
+ task_path = dl_utils.get_case_insensitive_name(os.path.join(task.destination, task.file_path))
+ split_path = os.path.split(task_path)
+ if split_path[0] and not os.path.exists(split_path[0]):
+ dl_utils.prepare_location(split_path[0])
+
+ if task.flags & TaskFlag.CREATE_FILE:
+ open(task_path, 'a').close()
+ results_queue.put(WriterTaskResult(True, task))
+ continue
+
+ elif task.flags & TaskFlag.OPEN_FILE:
+ if file_handle:
+ print("Opening on unclosed file")
+ file_handle.close()
+ file_handle = open(task_path, 'wb')
+ current_file = task_path
+ results_queue.put(WriterTaskResult(True, task))
+ continue
+
+ elif task.flags & TaskFlag.CLOSE_FILE:
+ if file_handle:
+ file_handle.close()
+ file_handle = None
+ results_queue.put(WriterTaskResult(True, task))
+ continue
+
+ elif task.flags & TaskFlag.COPY_FILE:
+ if file_handle and task.file_path == current_file:
+ print("Copy on unclosed file")
+ file_handle.close()
+ file_handle = None
+
+ if not task.old_file:
+ results_queue.put(WriterTaskResult(False, task))
+ continue
+
+ dest = task.old_destination or task.destination
+ try:
+ shutil.copy(dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file)), task_path)
+ except shutil.SameFileError:
+ pass
+ except Exception:
+ results_queue.put(WriterTaskResult(False, task))
+ continue
+ results_queue.put(WriterTaskResult(True, task))
+ continue
+
+ elif task.flags & TaskFlag.MAKE_EXE:
+ if file_handle and task.file_path == current_file:
+ print("Making exe on unclosed file")
+ file_handle.close()
+ file_handle = None
+ if sys.platform != 'win32':
+ try:
+ st = os.stat(task_path)
+ os.chmod(task_path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+ except Exception as e:
+ results_queue.put(WriterTaskResult(False, task))
+ continue
+ results_queue.put(WriterTaskResult(True, task))
+ continue
+
+ try:
+ if task.temp_file:
+ if not task.size:
+ print("No size")
+ results_queue.put(WriterTaskResult(False, task))
+ continue
+
+ # Read from temp file instead of shared memory
+ with open(task.temp_file, 'rb') as temp_f:
+ left = task.size
+ while left > 0:
+ chunk = temp_f.read(min(1024 * 1024, left))
+ written += file_handle.write(chunk)
+ speed_queue.put((len(chunk), 0))
+ left -= len(chunk)
+
+ if task.flags & TaskFlag.OFFLOAD_TO_CACHE and task.hash:
+ cache_file_path = os.path.join(cache, task.hash)
+ dl_utils.prepare_location(cache)
+ shutil.copy(task.temp_file, cache_file_path)
+ speed_queue.put((task.size, 0))
+
+ elif task.old_file:
+ if not task.size:
+ print("No size")
+ results_queue.put(WriterTaskResult(False, task))
+ continue
+ dest = task.old_destination or task.destination
+ old_file_path = dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file))
+ old_file_handle = open(old_file_path, "rb")
+ if task.old_offset:
+ old_file_handle.seek(task.old_offset)
+ left = task.size
+ while left > 0:
+ chunk = old_file_handle.read(min(1024*1024, left))
+ data = chunk
+ written += file_handle.write(data)
+ speed_queue.put((len(data), len(chunk)))
+ left -= len(chunk)
+ old_file_handle.close()
+
+ except Exception as e:
+ print("Writer exception", e)
+ results_queue.put(WriterTaskResult(False, task))
+ else:
+ results_queue.put(WriterTaskResult(True, task, written=written))
\ No newline at end of file
diff --git a/app/src/main/python/gogdl/imports.py b/app/src/main/python/gogdl/imports.py
new file mode 100644
index 000000000..b633c0864
--- /dev/null
+++ b/app/src/main/python/gogdl/imports.py
@@ -0,0 +1,130 @@
+import os
+import glob
+import json
+import logging
+from sys import exit
+from gogdl import constants
+import requests
+
+
+def get_info(args, unknown_args):
+ logger = logging.getLogger("IMPORT")
+ path = args.path
+ if not os.path.exists(path):
+ logger.error("Provided path is invalid!")
+ exit(1)
+ game_details = load_game_details(path)
+
+ info_file = game_details[0]
+ build_id_file = game_details[1]
+ platform = game_details[2]
+ with_dlcs = game_details[3]
+ build_id = ""
+ installed_language = None
+ info = {}
+ if platform != "linux":
+ if not info_file:
+ print("Error importing, no info file")
+ return
+ f = open(info_file, "r")
+ info = json.loads(f.read())
+ f.close()
+
+ title = info["name"]
+ game_id = info["rootGameId"]
+ build_id = info.get("buildId")
+ if "languages" in info:
+ installed_language = info["languages"][0]
+ elif "language" in info:
+ installed_language = info["language"]
+ else:
+ installed_language = "en-US"
+ if build_id_file:
+ f = open(build_id_file, "r")
+ build = json.loads(f.read())
+ f.close()
+ build_id = build.get("buildId")
+
+ version_name = build_id
+ if build_id and platform != "linux":
+ # Get version name
+ builds_res = requests.get(
+ f"{constants.GOG_CONTENT_SYSTEM}/products/{game_id}/os/{platform}/builds?generation=2",
+ headers={
+ "User-Agent": "GOGGalaxyCommunicationService/2.0.4.164 (Windows_32bit)"
+ },
+ )
+ builds = builds_res.json()
+ target_build = builds["items"][0]
+ for build in builds["items"]:
+ if build["build_id"] == build_id:
+ target_build = build
+ break
+ version_name = target_build["version_name"]
+ if platform == "linux" and os.path.exists(os.path.join(path, "gameinfo")):
+ # Linux version installed using installer
+ gameinfo_file = open(os.path.join(path, "gameinfo"), "r")
+ data = gameinfo_file.read()
+ lines = data.split("\n")
+ title = lines[0]
+ version_name = lines[1]
+
+ if not installed_language:
+ installed_language = lines[3]
+ if len(lines) > 4:
+ game_id = lines[4]
+ build_id = lines[6]
+ else:
+ game_id = None
+ build_id = None
+ print(
+ json.dumps(
+ {
+ "appName": game_id,
+ "buildId": build_id,
+ "title": title,
+ "tasks": info["playTasks"] if info and info.get("playTasks") else None,
+ "installedLanguage": installed_language,
+ "dlcs": with_dlcs,
+ "platform": platform,
+ "versionName": version_name,
+ }
+ )
+ )
+
+
+def load_game_details(path):
+ base_path = path
+ found = glob.glob(os.path.join(path, "goggame-*.info"))
+ build_id = glob.glob(os.path.join(path, "goggame-*.id"))
+ platform = "windows"
+ if not found:
+ base_path = os.path.join(path, "Contents", "Resources")
+ found = glob.glob(os.path.join(path, "Contents", "Resources", "goggame-*.info"))
+ build_id = glob.glob(
+ os.path.join(path, "Contents", "Resources", "goggame-*.id")
+ )
+ platform = "osx"
+ if not found:
+ base_path = os.path.join(path, "game")
+ found = glob.glob(os.path.join(path, "game", "goggame-*.info"))
+ build_id = glob.glob(os.path.join(path, "game", "goggame-*.id"))
+ platform = "linux"
+ if not found:
+ if os.path.exists(os.path.join(path, "gameinfo")):
+ return (None, None, "linux", [])
+
+ root_id = None
+ # Array of DLC game ids
+ dlcs = []
+ for info in found:
+ with open(info) as info_file:
+ data = json.load(info_file)
+ if not root_id:
+ root_id = data.get("rootGameId")
+ if data["gameId"] == root_id:
+ continue
+
+ dlcs.append(data["gameId"])
+
+ return (os.path.join(base_path, f"goggame-{root_id}.info"), os.path.join(base_path, f"goggame-{root_id}.id") if build_id else None, platform, dlcs)
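+
+# load_game_details() returns a tuple of (info file path or None, build-id file
+# path or None, detected platform, list of installed DLC game ids).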
diff --git a/app/src/main/python/gogdl/languages.py b/app/src/main/python/gogdl/languages.py
new file mode 100644
index 000000000..ca37cebee
--- /dev/null
+++ b/app/src/main/python/gogdl/languages.py
@@ -0,0 +1,123 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class Language:
+ code: str
+ name: str
+ native_name: str
+ deprecated_codes: list[str]
+
+ def __eq__(self, value: object) -> bool:
+ # Compare the class by language code
+ if isinstance(value, Language):
+ return self.code == value.code
+ # If comparing to string, look for the code, name and deprecated code
+ if type(value) is str:
+ return (
+ value == self.code
+ or value.lower() == self.name.lower()
+ or value in self.deprecated_codes
+ )
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self.code)
+
+ def __repr__(self):
+ return self.code
+
+ @staticmethod
+ def parse(val: str):
+ for lang in LANGUAGES:
+ if lang == val:
+ return lang
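+ # e.g. Language.parse("pl") and Language.parse("Polish") both resolve to the
+ # "pl-PL" entry below; values that match nothing fall through and return None.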
+
+
+# Auto-generated list of languages
+LANGUAGES = [
+ Language("af-ZA", "Afrikaans", "Afrikaans", []),
+ Language("ar", "Arabic", "العربية", []),
+ Language("az-AZ", "Azeri", "Azərbaycanılı", []),
+ Language("be-BY", "Belarusian", "Беларускі", ["be"]),
+ Language("bn-BD", "Bengali", "বাংলা", ["bn_BD"]),
+ Language("bg-BG", "Bulgarian", "български", ["bg", "bl"]),
+ Language("bs-BA", "Bosnian", "босански", []),
+ Language("ca-ES", "Catalan", "Català", ["ca"]),
+ Language("cs-CZ", "Czech", "Čeština", ["cz"]),
+ Language("cy-GB", "Welsh", "Cymraeg", []),
+ Language("da-DK", "Danish", "Dansk", ["da"]),
+ Language("de-DE", "German", "Deutsch", ["de"]),
+ Language("dv-MV", "Divehi", "ދިވެހިބަސް", []),
+ Language("el-GR", "Greek", "ελληνικά", ["gk", "el-GK"]),
+ Language("en-GB", "British English", "British English", ["en_GB"]),
+ Language("en-US", "English", "English", ["en"]),
+ Language("es-ES", "Spanish", "Español", ["es"]),
+ Language("es-MX", "Latin American Spanish", "Español (AL)", ["es_mx"]),
+ Language("et-EE", "Estonian", "Eesti", ["et"]),
+ Language("eu-ES", "Basque", "Euskara", []),
+ Language("fa-IR", "Persian", "فارسى", ["fa"]),
+ Language("fi-FI", "Finnish", "Suomi", ["fi"]),
+ Language("fo-FO", "Faroese", "Føroyskt", []),
+ Language("fr-FR", "French", "Français", ["fr"]),
+ Language("gl-ES", "Galician", "Galego", []),
+ Language("gu-IN", "Gujarati", "ગુજરાતી", ["gu"]),
+ Language("he-IL", "Hebrew", "עברית", ["he"]),
+ Language("hi-IN", "Hindi", "हिंदी", ["hi"]),
+ Language("hr-HR", "Croatian", "Hrvatski", []),
+ Language("hu-HU", "Hungarian", "Magyar", ["hu"]),
+ Language("hy-AM", "Armenian", "Հայերեն", []),
+ Language("id-ID", "Indonesian", "Bahasa Indonesia", []),
+ Language("is-IS", "Icelandic", "Íslenska", ["is"]),
+ Language("it-IT", "Italian", "Italiano", ["it"]),
+ Language("ja-JP", "Japanese", "日本語", ["jp"]),
+ Language("jv-ID", "Javanese", "ꦧꦱꦗꦮ", ["jv"]),
+ Language("ka-GE", "Georgian", "ქართული", []),
+ Language("kk-KZ", "Kazakh", "Қазақ", []),
+ Language("kn-IN", "Kannada", "ಕನ್ನಡ", []),
+ Language("ko-KR", "Korean", "한국어", ["ko"]),
+ Language("kok-IN", "Konkani", "कोंकणी", []),
+ Language("ky-KG", "Kyrgyz", "Кыргыз", []),
+ Language("la", "Latin", "latine", []),
+ Language("lt-LT", "Lithuanian", "Lietuvių", []),
+ Language("lv-LV", "Latvian", "Latviešu", []),
+ Language("ml-IN", "Malayalam", "മലയാളം", ["ml"]),
+ Language("mi-NZ", "Maori", "Reo Māori", []),
+ Language("mk-MK", "Macedonian", "Mакедонски јазик", []),
+ Language("mn-MN", "Mongolian", "Монгол хэл", []),
+ Language("mr-IN", "Marathi", "मराठी", ["mr"]),
+ Language("ms-MY", "Malay", "Bahasa Malaysia", []),
+ Language("mt-MT", "Maltese", "Malti", []),
+ Language("nb-NO", "Norwegian", "Norsk", ["no"]),
+ Language("nl-NL", "Dutch", "Nederlands", ["nl"]),
+ Language("ns-ZA", "Northern Sotho", "Sesotho sa Leboa", []),
+ Language("pa-IN", "Punjabi", "ਪੰਜਾਬੀ", []),
+ Language("pl-PL", "Polish", "Polski", ["pl"]),
+ Language("ps-AR", "Pashto", "پښتو", []),
+ Language("pt-BR", "Portuguese (Brazilian)", "Português do Brasil", ["br"]),
+ Language("pt-PT", "Portuguese", "Português", ["pt"]),
+ Language("ro-RO", "Romanian", "Română", ["ro"]),
+ Language("ru-RU", "Russian", "Pусский", ["ru"]),
+ Language("sa-IN", "Sanskrit", "संस्कृत", []),
+ Language("sk-SK", "Slovak", "Slovenčina", ["sk"]),
+ Language("sl-SI", "Slovenian", "Slovenski", []),
+ Language("sq-AL", "Albanian", "Shqipe", []),
+ Language("sr-SP", "Serbian", "Srpski", ["sb"]),
+ Language("sv-SE", "Swedish", "Svenska", ["sv"]),
+ Language("sw-KE", "Kiswahili", "Kiswahili", []),
+ Language("ta-IN", "Tamil", "தமிழ்", ["ta_IN"]),
+ Language("te-IN", "Telugu", "తెలుగు", ["te"]),
+ Language("th-TH", "Thai", "ไทย", ["th"]),
+ Language("tl-PH", "Tagalog", "Filipino", []),
+ Language("tn-ZA", "Setswana", "Setswana", []),
+ Language("tr-TR", "Turkish", "Türkçe", ["tr"]),
+ Language("tt-RU", "Tatar", "Татар", []),
+ Language("uk-UA", "Ukrainian", "Українська", ["uk"]),
+ Language("ur-PK", "Urdu", "اُردو", ["ur_PK"]),
+ Language("uz-UZ", "Uzbek", "U'zbek", []),
+ Language("vi-VN", "Vietnamese", "Tiếng Việt", ["vi"]),
+ Language("xh-ZA", "isiXhosa", "isiXhosa", []),
+ Language("zh-Hans", "Chinese (Simplified)", "中文(简体)", ["zh_Hans", "zh", "cn"]),
+ Language("zh-Hant", "Chinese (Traditional)", "中文(繁體)", ["zh_Hant"]),
+ Language("zu-ZA", "isiZulu", "isiZulu", []),
+]
diff --git a/app/src/main/python/gogdl/launch.py b/app/src/main/python/gogdl/launch.py
new file mode 100644
index 000000000..ab3a96253
--- /dev/null
+++ b/app/src/main/python/gogdl/launch.py
@@ -0,0 +1,284 @@
+import os
+import json
+import sys
+import subprocess
+import time
+from gogdl.dl.dl_utils import get_case_insensitive_name
+from ctypes import *
+from gogdl.process import Process
+import signal
+import shutil
+import shlex
+
+class NoMoreChildren(Exception):
+ pass
+
+def get_flatpak_command(id: str) -> list[str]:
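+ # Returns a command prefix such as ["flatpak", "run", id] (prepended with
+ # ["flatpak-spawn", "--host"] when running inside a Flatpak sandbox), or []
+ # when the requested Flatpak app isn't available.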
+ if sys.platform != "linux":
+ return []
+ new_process_command = []
+ process_command = ["flatpak", "info", id]
+ if os.path.exists("/.flatpak-info"):
+ try:
+ spawn_test = subprocess.run(["flatpak-spawn", "--host", "ls"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except FileNotFoundError:
+ return []
+ if spawn_test.returncode != 0:
+ return []
+
+ new_process_command = ["flatpak-spawn", "--host"]
+ process_command = new_process_command + process_command
+
+ try:
+ output = subprocess.run(process_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ if output.returncode == 0:
+ return new_process_command + ["flatpak", "run", id]
+
+ except FileNotFoundError:
+ pass
+ return []
+
+
+# Supports launching linux builds
+def launch(arguments, unknown_args):
+ # print(arguments)
+ info = load_game_info(arguments.path, arguments.id, arguments.platform)
+
+ wrapper = []
+ if arguments.wrapper:
+ wrapper = shlex.split(arguments.wrapper)
+ envvars = {}
+
+ unified_platform = {"win32": "windows", "darwin": "osx", "linux": "linux"}
+ command = list()
+ working_dir = arguments.path
+ heroic_exe_wrapper = os.environ.get("HEROIC_GOGDL_WRAPPER_EXE")
+ # If type is a string we know it's a path to start.sh on linux
+ if type(info) != str:
+ if sys.platform != "win32":
+ if not arguments.dont_use_wine and arguments.platform != unified_platform[sys.platform]:
+ if arguments.wine_prefix:
+ envvars["WINEPREFIX"] = arguments.wine_prefix
+ wrapper.append(arguments.wine)
+
+ primary_task = get_preferred_task(info, arguments.preferred_task)
+ launch_arguments = primary_task.get("arguments")
+ compatibility_flags = primary_task.get("compatibilityFlags")
+ executable = os.path.join(arguments.path, primary_task["path"])
+ if arguments.platform == "linux":
+ executable = os.path.join(arguments.path, "game", primary_task["path"])
+ if launch_arguments is None:
+ launch_arguments = []
+ if type(launch_arguments) == str:
+ launch_arguments = launch_arguments.replace('\\', '/')
+ launch_arguments = shlex.split(launch_arguments)
+ if compatibility_flags is None:
+ compatibility_flags = []
+
+ relative_working_dir = (
+ primary_task["workingDir"] if primary_task.get("workingDir") else ""
+ )
+ if sys.platform != "win32":
+ relative_working_dir = relative_working_dir.replace("\\", os.sep)
+ executable = executable.replace("\\", os.sep)
+ working_dir = os.path.join(arguments.path, relative_working_dir)
+
+ if not os.path.exists(executable):
+ executable = get_case_insensitive_name(executable)
+ # Handle case sensitive file systems
+ if not os.path.exists(working_dir):
+ working_dir = get_case_insensitive_name(working_dir)
+
+ os.chdir(working_dir)
+
+ if sys.platform != "win32" and arguments.platform == 'windows' and not arguments.override_exe:
+ if "scummvm.exe" in executable.lower():
+ flatpak_scummvm = get_flatpak_command("org.scummvm.ScummVM")
+ native_scummvm = shutil.which("scummvm")
+ if native_scummvm:
+ native_scummvm = [native_scummvm]
+
+ native_runner = flatpak_scummvm or native_scummvm
+ if native_runner:
+ wrapper = native_runner
+ executable = None
+ elif "dosbox.exe" in executable.lower():
+ flatpak_dosbox = get_flatpak_command("io.github.dosbox-staging")
+ native_dosbox = shutil.which("dosbox")
+ if native_dosbox:
+ native_dosbox = [native_dosbox]
+
+ native_runner = flatpak_dosbox or native_dosbox
+ if native_runner:
+ wrapper = native_runner
+ executable = None
+
+ if len(wrapper) > 0 and wrapper[0] is not None:
+ command.extend(wrapper)
+
+ if heroic_exe_wrapper:
+ command.append(heroic_exe_wrapper.strip())
+
+ if arguments.override_exe:
+ command.append(arguments.override_exe)
+ working_dir = os.path.split(arguments.override_exe)[0]
+ if not os.path.exists(working_dir):
+ working_dir = get_case_insensitive_name(working_dir)
+ elif executable:
+ command.append(executable)
+ command.extend(launch_arguments)
+ else:
+ if len(wrapper) > 0 and wrapper[0] is not None:
+ command.extend(wrapper)
+
+ if heroic_exe_wrapper:
+ command.append(heroic_exe_wrapper.strip())
+
+ if arguments.override_exe:
+ command.append(arguments.override_exe)
+ working_dir = os.path.split(arguments.override_exe)[0]
+ # Handle case sensitive file systems
+ if not os.path.exists(working_dir):
+ working_dir = get_case_insensitive_name(working_dir)
+ else:
+ command.append(info)
+
+ os.chdir(working_dir)
+ command.extend(unknown_args)
+ environment = os.environ.copy()
+ environment.update(envvars)
+
+ if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
+ bundle_dir = sys._MEIPASS
+ ld_library = environment.get("LD_LIBRARY_PATH")
+ if ld_library:
+ splitted = ld_library.split(":")
+ try:
+ splitted.remove(bundle_dir)
+ except ValueError:
+ pass
+ environment.update({"LD_LIBRARY_PATH": ":".join(splitted)})
+
+ print("Launch command:", command)
+
+ status = None
+ if sys.platform == 'linux':
+ libc = cdll.LoadLibrary("libc.so.6")
+ prctl = libc.prctl
+        result = prctl(36, 1, 0, 0, 0, 0)  # PR_SET_CHILD_SUBREAPER = 36
+
+        if result == -1:
+            print("PR_SET_CHILD_SUBREAPER is not supported by your kernel (requires Linux 3.4 or newer)")
+
+ process = subprocess.Popen(command, env=environment)
+ process_pid = process.pid
+
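+        # Walk our descendants via /proc, skipping zombies that only await reaping.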
+ def iterate_processes():
+ for child in Process(os.getpid()).iter_children():
+ if child.state == 'Z':
+ continue
+
+ if child.name:
+ yield child
+
+ def hard_sig_handler(signum, _frame):
+ for _ in range(3): # just in case we race a new process.
+ for child in Process(os.getpid()).iter_children():
+ try:
+ os.kill(child.pid, signal.SIGKILL)
+ except ProcessLookupError:
+ pass
+
+
+ def sig_handler(signum, _frame):
+ signal.signal(signal.SIGTERM, hard_sig_handler)
+ signal.signal(signal.SIGINT, hard_sig_handler)
+ for _ in range(3): # just in case we race a new process.
+ for child in Process(os.getpid()).iter_children():
+ try:
+ os.kill(child.pid, signal.SIGTERM)
+ except ProcessLookupError:
+ pass
+
+ def is_alive():
+ return next(iterate_processes(), None) is not None
+
+ signal.signal(signal.SIGTERM, sig_handler)
+ signal.signal(signal.SIGINT, sig_handler)
+
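+        # Reap exited children without blocking, remembering the exit status of the
+        # process we launched directly; raises NoMoreChildren once no children remain.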
+ def reap_children():
+ nonlocal status
+ while True:
+ try:
+ child_pid, child_returncode, _resource_usage = os.wait3(os.WNOHANG)
+ except ChildProcessError:
+ raise NoMoreChildren from None # No processes remain.
+ if child_pid == process_pid:
+ status = child_returncode
+
+ if child_pid == 0:
+ break
+
+ try:
+ # The initial wait loop:
+ # the initial process may have been excluded. Wait for the game
+ # to be considered "started".
+ if not is_alive():
+ while not is_alive():
+ reap_children()
+ time.sleep(0.1)
+ while is_alive():
+ reap_children()
+ time.sleep(0.1)
+ reap_children()
+ except NoMoreChildren:
+ print("All processes exited")
+
+
+ else:
+ process = subprocess.Popen(command, env=environment,
+ shell=sys.platform=="win32")
+ status = process.wait()
+
+ sys.exit(status)
+
+
+def get_preferred_task(info, index):
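+    """Return the play task at `index` when given and in range, otherwise the primary task."""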
+    primary_task = None
+    for task in info["playTasks"]:
+        if task.get("isPrimary"):
+            primary_task = task
+            break
+    if index is None:
+        return primary_task
+    task_index = int(index)
+    if len(info["playTasks"]) > task_index:
+        return info["playTasks"][task_index]
+
+    return primary_task
+
+
+def load_game_info(path, id, platform):
+ filename = f"goggame-{id}.info"
+    if platform == "osx":
+        abs_path = os.path.join(path, "Contents", "Resources", filename)
+    elif platform == "windows":
+        abs_path = os.path.join(path, filename)
+    else:
+        # Linux builds are launched through their start.sh script rather than an .info file
+        abs_path = os.path.join(path, "start.sh")
+    if not os.path.isfile(abs_path):
+        sys.exit(1)
+    if platform == "linux":
+        return abs_path
+    with open(abs_path) as f:
+        data = f.read()
+    return json.loads(data)
+
+
diff --git a/app/src/main/python/gogdl/process.py b/app/src/main/python/gogdl/process.py
new file mode 100644
index 000000000..c54cac082
--- /dev/null
+++ b/app/src/main/python/gogdl/process.py
@@ -0,0 +1,138 @@
+import os
+
+
+class InvalidPid(Exception):
+    """Raised when an operation is attempted on a non-existent PID"""
+
+
+class Process:
+    """Python abstraction of a Linux process, backed by /proc"""
+
+ def __init__(self, pid):
+ try:
+ self.pid = int(pid)
+ self.error_cache = []
+ except ValueError as err:
+ raise InvalidPid("'%s' is not a valid pid" % pid) from err
+
+ def __repr__(self):
+ return "Process {}".format(self.pid)
+
+ def __str__(self):
+ return "{} ({}:{})".format(self.name, self.pid, self.state)
+
+ def _read_content(self, file_path):
+ """Return the contents from a file in /proc"""
+ try:
+ with open(file_path, encoding='utf-8') as proc_file:
+ content = proc_file.read()
+ except (ProcessLookupError, FileNotFoundError, PermissionError):
+ return ""
+ return content
+
+ def get_stat(self, parsed=True):
+ stat_filename = "/proc/{}/stat".format(self.pid)
+ try:
+ with open(stat_filename, encoding='utf-8') as stat_file:
+ _stat = stat_file.readline()
+ except (ProcessLookupError, FileNotFoundError):
+ return None
+ if parsed:
+ return _stat[_stat.rfind(")") + 1:].split()
+ return _stat
+
+ def get_thread_ids(self):
+ """Return a list of thread ids opened by process."""
+ basedir = "/proc/{}/task/".format(self.pid)
+ if os.path.isdir(basedir):
+ try:
+ return os.listdir(basedir)
+ except FileNotFoundError:
+ return []
+ else:
+ return []
+
+ def get_children_pids_of_thread(self, tid):
+ """Return pids of child processes opened by thread `tid` of process."""
+ children_path = "/proc/{}/task/{}/children".format(self.pid, tid)
+ try:
+ with open(children_path, encoding='utf-8') as children_file:
+ children_content = children_file.read()
+ except (FileNotFoundError, ProcessLookupError):
+ children_content = ""
+ return children_content.strip().split()
+
+ @property
+ def name(self):
+ """Filename of the executable."""
+ _stat = self.get_stat(parsed=False)
+ if _stat:
+ return _stat[_stat.find("(") + 1:_stat.rfind(")")]
+ return None
+
+ @property
+ def state(self):
+ """One character from the string "RSDZTW" where R is running, S is
+ sleeping in an interruptible wait, D is waiting in uninterruptible disk
+ sleep, Z is zombie, T is traced or stopped (on a signal), and W is
+ paging.
+ """
+ _stat = self.get_stat()
+ if _stat:
+ return _stat[0]
+ return None
+
+ @property
+ def cmdline(self):
+ """Return command line used to run the process `pid`."""
+ cmdline_path = "/proc/{}/cmdline".format(self.pid)
+ _cmdline_content = self._read_content(cmdline_path)
+ if _cmdline_content:
+ return _cmdline_content.replace("\x00", " ").replace("\\", "/")
+
+ @property
+ def cwd(self):
+ """Return current working dir of process"""
+ cwd_path = "/proc/%d/cwd" % int(self.pid)
+ return os.readlink(cwd_path)
+
+ @property
+ def environ(self):
+ """Return the process' environment variables"""
+ environ_path = "/proc/{}/environ".format(self.pid)
+ _environ_text = self._read_content(environ_path)
+ if not _environ_text:
+ return {}
+ try:
+ return dict([line.split("=", 1) for line in _environ_text.split("\x00") if line])
+ except ValueError:
+ if environ_path not in self.error_cache:
+ self.error_cache.append(environ_path)
+ return {}
+
+ @property
+ def children(self):
+ """Return the child processes of this process"""
+ _children = []
+ for tid in self.get_thread_ids():
+ for child_pid in self.get_children_pids_of_thread(tid):
+ _children.append(Process(child_pid))
+ return _children
+
+ def iter_children(self):
+ """Iterator that yields all the children of a process"""
+ for child in self.children:
+ yield child
+ yield from child.iter_children()
+
+    def wait_for_finish(self):
+        """Wait until the process finishes.
+        This only works if self.pid is a child of the current process.
+        """
+        try:
+            # A negative PID waits on any process in the process group led by self.pid
+            _pid, ret_status = os.waitpid(-self.pid, 0)
+        except OSError:
+            return -1
+        return ret_status
diff --git a/app/src/main/python/gogdl/saves.py b/app/src/main/python/gogdl/saves.py
new file mode 100644
index 000000000..9f2994247
--- /dev/null
+++ b/app/src/main/python/gogdl/saves.py
@@ -0,0 +1,365 @@
+"""
+Android-compatible GOG cloud save synchronization
+Adapted from heroic-gogdl saves.py
+"""
+
+import os
+import sys
+import logging
+import requests
+import hashlib
+import datetime
+import gzip
+from enum import Enum
+
+import gogdl.dl.dl_utils as dl_utils
+import gogdl.constants as constants
+
+LOCAL_TIMEZONE = datetime.datetime.utcnow().astimezone().tzinfo
+
+
+class SyncAction(Enum):
+ DOWNLOAD = 0
+ UPLOAD = 1
+ CONFLICT = 2
+ NONE = 3
+
+
+class SyncFile:
+ def __init__(self, path, abs_path, md5=None, update_time=None):
+ self.relative_path = path.replace('\\', '/') # cloud file identifier
+ self.absolute_path = abs_path
+ self.md5 = md5
+ self.update_time = update_time
+ self.update_ts = (
+ datetime.datetime.fromisoformat(update_time).astimezone().timestamp()
+ if update_time
+ else None
+ )
+
+ def get_file_metadata(self):
+ ts = os.stat(self.absolute_path).st_mtime
+ date_time_obj = datetime.datetime.fromtimestamp(
+ ts, tz=LOCAL_TIMEZONE
+ ).astimezone(datetime.timezone.utc)
+        # Hash the gzip-compressed contents; mtime=0 keeps the compressed output
+        # (and therefore the hash) deterministic
+        with open(self.absolute_path, "rb") as save_file:
+            self.md5 = hashlib.md5(
+                gzip.compress(save_file.read(), 6, mtime=0)
+            ).hexdigest()
+
+ self.update_time = date_time_obj.isoformat(timespec="seconds")
+ self.update_ts = date_time_obj.timestamp()
+
+ def __repr__(self):
+ return f"{self.md5} {self.relative_path}"
+
+
+class CloudStorageManager:
+ def __init__(self, api_handler, authorization_manager):
+ self.api = api_handler
+ self.auth_manager = authorization_manager
+ self.session = requests.Session()
+ self.logger = logging.getLogger("SAVES")
+
+ self.session.headers.update(
+ {"User-Agent": "GOGGalaxyCommunicationService/2.0.13.27 (Windows_32bit) dont_sync_marker/true installation_source/gog",
+ "X-Object-Meta-User-Agent": "GOGGalaxyCommunicationService/2.0.13.27 (Windows_32bit) dont_sync_marker/true installation_source/gog"}
+ )
+
+ self.credentials = dict()
+ self.client_id = str()
+ self.client_secret = str()
+
+ def create_directory_map(self, path: str) -> list:
+ """
+        Creates a list of every file in the directory to be synced
+ """
+ files = list()
+ try:
+ directory_contents = os.listdir(path)
+ except (OSError, FileNotFoundError):
+ self.logger.warning(f"Cannot access directory: {path}")
+ return files
+
+ for content in directory_contents:
+ abs_path = os.path.join(path, content)
+ if os.path.isdir(abs_path):
+ files.extend(self.create_directory_map(abs_path))
+ else:
+ files.append(abs_path)
+ return files
+
+ @staticmethod
+ def get_relative_path(root: str, path: str) -> str:
+ if not root.endswith("/") and not root.endswith("\\"):
+ root = root + os.sep
+ return path.replace(root, "")
+
+ def sync(self, arguments, unknown_args):
+ try:
+ prefered_action = getattr(arguments, 'prefered_action', None)
+ self.sync_path = os.path.normpath(arguments.path.strip('"'))
+ self.sync_path = self.sync_path.replace("\\", os.sep)
+ self.cloud_save_dir_name = getattr(arguments, 'dirname', 'saves')
+ self.arguments = arguments
+ self.unknown_args = unknown_args
+
+ if not os.path.exists(self.sync_path):
+ self.logger.warning("Provided path doesn't exist, creating")
+ os.makedirs(self.sync_path, exist_ok=True)
+
+ dir_list = self.create_directory_map(self.sync_path)
+ if len(dir_list) == 0:
+ self.logger.info("No files in directory")
+
+ local_files = [
+ SyncFile(self.get_relative_path(self.sync_path, f), f) for f in dir_list
+ ]
+
+ for f in local_files:
+ try:
+ f.get_file_metadata()
+ except Exception as e:
+ self.logger.warning(f"Failed to get metadata for {f.absolute_path}: {e}")
+
+ self.logger.info(f"Local files: {len(dir_list)}")
+
+ # Get authentication credentials
+ try:
+ self.client_id, self.client_secret = self.get_auth_ids()
+ self.get_auth_token()
+ except Exception as e:
+ self.logger.error(f"Authentication failed: {e}")
+ return
+
+ # Get cloud files
+ try:
+ cloud_files = self.get_cloud_files_list()
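+                # This hash appears to be GOG's marker for deleted cloud files; skip those entries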
+ downloadable_cloud = [f for f in cloud_files if f.md5 != "aadd86936a80ee8a369579c3926f1b3c"]
+ except Exception as e:
+ self.logger.error(f"Failed to get cloud files: {e}")
+ return
+
+ # Handle sync logic
+ if len(local_files) > 0 and len(cloud_files) == 0:
+ self.logger.info("No files in cloud, uploading")
+ for f in local_files:
+ try:
+ self.upload_file(f)
+ except Exception as e:
+ self.logger.error(f"Failed to upload {f.relative_path}: {e}")
+ self.logger.info("Done")
+ sys.stdout.write(str(datetime.datetime.now().timestamp()))
+ sys.stdout.flush()
+ return
+
+ elif len(local_files) == 0 and len(cloud_files) > 0:
+ self.logger.info("No files locally, downloading")
+ for f in downloadable_cloud:
+ try:
+ self.download_file(f)
+ except Exception as e:
+ self.logger.error(f"Failed to download {f.relative_path}: {e}")
+ self.logger.info("Done")
+ sys.stdout.write(str(datetime.datetime.now().timestamp()))
+ sys.stdout.flush()
+ return
+
+ # Handle more complex sync scenarios
+ timestamp = float(getattr(arguments, 'timestamp', 0.0))
+ classifier = SyncClassifier.classify(local_files, cloud_files, timestamp)
+
+ action = classifier.get_action()
+ if action == SyncAction.DOWNLOAD:
+ self.logger.info("Downloading newer cloud files")
+ for f in classifier.updated_cloud:
+ try:
+ self.download_file(f)
+ except Exception as e:
+ self.logger.error(f"Failed to download {f.relative_path}: {e}")
+
+ elif action == SyncAction.UPLOAD:
+ self.logger.info("Uploading newer local files")
+ for f in classifier.updated_local:
+ try:
+ self.upload_file(f)
+ except Exception as e:
+ self.logger.error(f"Failed to upload {f.relative_path}: {e}")
+
+ elif action == SyncAction.CONFLICT:
+ self.logger.warning("Sync conflict detected - manual intervention required")
+
+ self.logger.info("Sync completed")
+ sys.stdout.write(str(datetime.datetime.now().timestamp()))
+ sys.stdout.flush()
+
+ except Exception as e:
+ self.logger.error(f"Sync failed: {e}")
+ raise
+
+ def get_auth_ids(self):
+ """Get client credentials from auth manager"""
+ try:
+ # Use the same client ID as the main app
+ client_id = "46899977096215655"
+ client_secret = "9d85c43b1482497dbbce61f6e4aa173a433796eeae2ca8c5f6129f2dc4de46d9"
+ return client_id, client_secret
+ except Exception as e:
+ self.logger.error(f"Failed to get auth IDs: {e}")
+ raise
+
+ def get_auth_token(self):
+ """Get authentication token"""
+ try:
+ # Load credentials from auth file
+ import json
+ with open(self.auth_manager.config_path, 'r') as f:
+ auth_data = json.load(f)
+
+ # Extract credentials for our client ID
+ client_creds = auth_data.get(self.client_id, {})
+ self.credentials = {
+ 'access_token': client_creds.get('access_token', ''),
+ 'user_id': client_creds.get('user_id', '')
+ }
+
+ if not self.credentials['access_token']:
+ raise Exception("No valid access token found")
+
+ # Update session headers
+ self.session.headers.update({
+ 'Authorization': f"Bearer {self.credentials['access_token']}"
+ })
+
+ except Exception as e:
+ self.logger.error(f"Failed to get auth token: {e}")
+ raise
+
+ def get_cloud_files_list(self):
+ """Get list of files from GOG cloud storage"""
+ try:
+ url = f"{constants.GOG_CLOUDSTORAGE}/v1/{self.credentials['user_id']}/{self.client_id}"
+ response = self.session.get(url)
+
+ if not response.ok:
+ self.logger.error(f"Failed to get cloud files: {response.status_code}")
+ return []
+
+ cloud_data = response.json()
+ cloud_files = []
+
+ for item in cloud_data.get('items', []):
+ if self.is_save_file(item):
+ cloud_file = SyncFile(
+ self.get_relative_path(f"{self.cloud_save_dir_name}/", item['name']),
+ "", # No local path for cloud files
+ item.get('hash'),
+ item.get('last_modified')
+ )
+ cloud_files.append(cloud_file)
+
+ return cloud_files
+
+ except Exception as e:
+ self.logger.error(f"Failed to get cloud files list: {e}")
+ return []
+
+ def is_save_file(self, item):
+ """Check if cloud item is a save file"""
+ return item.get("name", "").startswith(self.cloud_save_dir_name)
+
+ def upload_file(self, file: SyncFile):
+ """Upload file to GOG cloud storage"""
+ try:
+ url = f"{constants.GOG_CLOUDSTORAGE}/v1/{self.credentials['user_id']}/{self.client_id}/{self.cloud_save_dir_name}/{file.relative_path}"
+
+ with open(file.absolute_path, 'rb') as f:
+ headers = {
+ 'X-Object-Meta-LocalLastModified': file.update_time,
+ 'Content-Type': 'application/octet-stream'
+ }
+ response = self.session.put(url, data=f, headers=headers)
+
+ if not response.ok:
+ self.logger.error(f"Upload failed for {file.relative_path}: {response.status_code}")
+
+ except Exception as e:
+ self.logger.error(f"Failed to upload {file.relative_path}: {e}")
+
+ def download_file(self, file: SyncFile, retries=3):
+ """Download file from GOG cloud storage"""
+ try:
+ url = f"{constants.GOG_CLOUDSTORAGE}/v1/{self.credentials['user_id']}/{self.client_id}/{self.cloud_save_dir_name}/{file.relative_path}"
+ response = self.session.get(url, stream=True)
+
+ if not response.ok:
+ self.logger.error(f"Download failed for {file.relative_path}: {response.status_code}")
+ return
+
+ # Create local directory structure
+ local_path = os.path.join(self.sync_path, file.relative_path)
+ os.makedirs(os.path.dirname(local_path), exist_ok=True)
+
+ # Download file
+ with open(local_path, 'wb') as f:
+ for chunk in response.iter_content(chunk_size=8192):
+ f.write(chunk)
+
+ # Set file timestamp if available
+ if 'X-Object-Meta-LocalLastModified' in response.headers:
+ try:
+ timestamp = datetime.datetime.fromisoformat(
+ response.headers['X-Object-Meta-LocalLastModified']
+ ).timestamp()
+ os.utime(local_path, (timestamp, timestamp))
+ except Exception as e:
+ self.logger.warning(f"Failed to set timestamp for {file.relative_path}: {e}")
+
+ except Exception as e:
+ if retries > 1:
+ self.logger.debug(f"Failed sync of {file.relative_path}, retrying (retries left {retries - 1})")
+ self.download_file(file, retries - 1)
+ else:
+ self.logger.error(f"Failed to download {file.relative_path}: {e}")
+
+
+class SyncClassifier:
+ def __init__(self):
+ self.action = None
+ self.updated_local = list()
+ self.updated_cloud = list()
+ self.not_existing_locally = list()
+ self.not_existing_remotely = list()
+
+ def get_action(self):
+ if len(self.updated_local) == 0 and len(self.updated_cloud) > 0:
+ self.action = SyncAction.DOWNLOAD
+ elif len(self.updated_local) > 0 and len(self.updated_cloud) == 0:
+ self.action = SyncAction.UPLOAD
+ elif len(self.updated_local) == 0 and len(self.updated_cloud) == 0:
+ self.action = SyncAction.NONE
+ else:
+ self.action = SyncAction.CONFLICT
+ return self.action
+
+ @classmethod
+ def classify(cls, local, cloud, timestamp):
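+        """Classify local and cloud files against `timestamp` (the last sync): which
+        changed since then and which exist on only one side."""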
+ classifier = cls()
+
+ local_paths = [f.relative_path for f in local]
+ cloud_paths = [f.relative_path for f in cloud]
+
+ for f in local:
+ if f.relative_path not in cloud_paths:
+ classifier.not_existing_remotely.append(f)
+ if f.update_ts and f.update_ts > timestamp:
+ classifier.updated_local.append(f)
+
+ for f in cloud:
+ if f.md5 == "aadd86936a80ee8a369579c3926f1b3c":
+ continue
+ if f.relative_path not in local_paths:
+ classifier.not_existing_locally.append(f)
+ if f.update_ts and f.update_ts > timestamp:
+ classifier.updated_cloud.append(f)
+
+ return classifier
diff --git a/app/src/main/python/gogdl/xdelta/__init__.py b/app/src/main/python/gogdl/xdelta/__init__.py
new file mode 100644
index 000000000..6ccc12390
--- /dev/null
+++ b/app/src/main/python/gogdl/xdelta/__init__.py
@@ -0,0 +1 @@
+# Pure-Python implementation of xdelta3 (decoding only)
diff --git a/app/src/main/python/gogdl/xdelta/objects.py b/app/src/main/python/gogdl/xdelta/objects.py
new file mode 100644
index 000000000..f2bb9b691
--- /dev/null
+++ b/app/src/main/python/gogdl/xdelta/objects.py
@@ -0,0 +1,139 @@
+from dataclasses import dataclass
+from io import IOBase, BytesIO
+from typing import Optional
+
+@dataclass
+class CodeTable:
+ add_sizes = 17
+ near_modes = 4
+ same_modes = 3
+
+ cpy_sizes = 15
+
+ addcopy_add_max = 4
+ addcopy_near_cpy_max = 6
+ addcopy_same_cpy_max = 4
+
+ copyadd_add_max = 1
+ copyadd_near_cpy_max = 4
+ copyadd_same_cpy_max = 4
+
+    addcopy_max_sizes = [[6, 163, 3], [6, 175, 3], [6, 187, 3], [6, 199, 3], [6, 211, 3], [6, 223, 3],
+                         [4, 235, 1], [4, 239, 1], [4, 243, 1]]
+    copyadd_max_sizes = [[4, 247, 1], [4, 248, 1], [4, 249, 1], [4, 250, 1], [4, 251, 1], [4, 252, 1],
+                         [4, 253, 1], [4, 254, 1], [4, 255, 1]]
+
+XD3_NOOP = 0
+XD3_ADD = 1
+XD3_RUN = 2
+XD3_CPY = 3
+
+@dataclass
+class Instruction:
+    type1: int = 0
+    size1: int = 0
+    type2: int = 0
+    size2: int = 0
+
+@dataclass
+class HalfInstruction:
+ type: int = 0
+ size: int = 0
+ addr: int = 0
+
+
+@dataclass
+class AddressCache:
+ s_near = CodeTable.near_modes
+ s_same = CodeTable.same_modes
+ next_slot = 0
+ near_array = [0 for _ in range(s_near)]
+ same_array = [0 for _ in range(s_same * 256)]
+
+ def update(self, addr):
+ self.near_array[self.next_slot] = addr
+ self.next_slot = (self.next_slot + 1) % self.s_near
+
+ self.same_array[addr % (self.s_same*256)] = addr
+
+@dataclass
+class Context:
+ source: IOBase
+ target: IOBase
+
+ data_sec: BytesIO
+ inst_sec: BytesIO
+ addr_sec: BytesIO
+
+ acache: AddressCache
+ dec_pos: int = 0
+ cpy_len: int = 0
+ cpy_off: int = 0
+ dec_winoff: int = 0
+
+ target_buffer: Optional[bytearray] = None
+
+def build_code_table():
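+    """Build the 256-entry default instruction code table used by the xdelta3 decoder."""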
+ table: list[Instruction] = []
+ for _ in range(256):
+ table.append(Instruction())
+
+ cpy_modes = 2 + CodeTable.near_modes + CodeTable.same_modes
+ i = 0
+
+ table[i].type1 = XD3_RUN
+ i+=1
+ table[i].type1 = XD3_ADD
+ i+=1
+
+ size1 = 1
+
+ for size1 in range(1, CodeTable.add_sizes + 1):
+ table[i].type1 = XD3_ADD
+ table[i].size1 = size1
+ i+=1
+
+ for mode in range(0, cpy_modes):
+ table[i].type1 = XD3_CPY + mode
+ i += 1
+ for size1 in range(4, 4 + CodeTable.cpy_sizes):
+ table[i].type1 = XD3_CPY + mode
+ table[i].size1 = size1
+ i+=1
+
+
+    for mode in range(cpy_modes):
+        for size1 in range(1, CodeTable.addcopy_add_max + 1):
+            is_near = mode < (2 + CodeTable.near_modes)
+            if is_near:
+                cpy_max = CodeTable.addcopy_near_cpy_max
+            else:
+                cpy_max = CodeTable.addcopy_same_cpy_max
+            for size2 in range(4, cpy_max + 1):
+                table[i].type1 = XD3_ADD
+                table[i].size1 = size1
+                table[i].type2 = XD3_CPY + mode
+                table[i].size2 = size2
+                i += 1
+
+
+    for mode in range(cpy_modes):
+        is_near = mode < (2 + CodeTable.near_modes)
+        if is_near:
+            cpy_max = CodeTable.copyadd_near_cpy_max
+        else:
+            cpy_max = CodeTable.copyadd_same_cpy_max
+        for size1 in range(4, cpy_max + 1):
+            for size2 in range(1, CodeTable.copyadd_add_max + 1):
+                table[i].type1 = XD3_CPY + mode
+                table[i].size1 = size1
+                table[i].type2 = XD3_ADD
+                table[i].size2 = size2
+                i += 1
+
+ return table
+
+CODE_TABLE = build_code_table()
+
+class ChecksumMissmatch(AssertionError):
+ pass
diff --git a/app/src/main/python/gogdl/xdelta/patcher.py b/app/src/main/python/gogdl/xdelta/patcher.py
new file mode 100644
index 000000000..19f3a9f1b
--- /dev/null
+++ b/app/src/main/python/gogdl/xdelta/patcher.py
@@ -0,0 +1,204 @@
+from io import BytesIO
+import math
+from multiprocessing import Queue
+from zlib import adler32
+from gogdl.xdelta import objects
+
+# Read a variable-length integer from a stream (7 bits per byte, high bit marks continuation)
+def read_integer_stream(stream):
+ res = 0
+ while True:
+ res <<= 7
+ integer = stream.read(1)[0]
+ res |= (integer & 0b1111111)
+ if not (integer & 0b10000000):
+ break
+
+ return res
+
+def parse_halfinst(context: objects.Context, halfinst: objects.HalfInstruction):
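+    """Read the instruction size when the code table left it implicit and, for copies,
+    decode the address using the VCDIFF address cache modes (SELF, HERE, near, same)."""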
+ if halfinst.size == 0:
+ halfinst.size = read_integer_stream(context.inst_sec)
+
+ if halfinst.type >= objects.XD3_CPY:
+ # Decode address
+ mode = halfinst.type - objects.XD3_CPY
+ same_start = 2 + context.acache.s_near
+
+ if mode < same_start:
+ halfinst.addr = read_integer_stream(context.addr_sec)
+
+ if mode == 0:
+ pass
+ elif mode == 1:
+ halfinst.addr = context.dec_pos - halfinst.addr
+ if halfinst.addr < 0:
+ halfinst.addr = context.cpy_len + halfinst.addr
+ else:
+ halfinst.addr += context.acache.near_array[mode - 2]
+ else:
+ mode -= same_start
+ addr = context.addr_sec.read(1)[0]
+ halfinst.addr = context.acache.same_array[(mode * 256) + addr]
+ context.acache.update(halfinst.addr)
+
+ context.dec_pos += halfinst.size
+
+
+def decode_halfinst(context:objects.Context, halfinst: objects.HalfInstruction, speed_queue: Queue):
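+    """Execute one half-instruction: RUN repeats a single byte, ADD emits literal data
+    from the data section, CPY copies a range from the source window."""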
+ take = halfinst.size
+
+ if halfinst.type == objects.XD3_RUN:
+ byte = context.data_sec.read(1)
+
+ for _ in range(take):
+ context.target_buffer.extend(byte)
+
+ halfinst.type = objects.XD3_NOOP
+ elif halfinst.type == objects.XD3_ADD:
+ buffer = context.data_sec.read(take)
+ assert len(buffer) == take
+ context.target_buffer.extend(buffer)
+ halfinst.type = objects.XD3_NOOP
+ else: # XD3_CPY and higher
+ if halfinst.addr < (context.cpy_len or 0):
+ context.source.seek(context.cpy_off + halfinst.addr)
+ left = take
+ while left > 0:
+ buffer = context.source.read(min(1024 * 1024, left))
+ size = len(buffer)
+ speed_queue.put((0, size))
+ context.target_buffer.extend(buffer)
+ left -= size
+
+        else:
+            # Copy source lies inside the target window (self-referential copy);
+            # this decoder does not implement that case
+            print("OVERLAP NOT IMPLEMENTED")
+            raise Exception("OVERLAP")
+ halfinst.type = objects.XD3_NOOP
+
+
+def patch(source: str, patch: str, out: str, speed_queue: Queue):
+ src_handle = open(source, 'rb')
+ patch_handle = open(patch, 'rb')
+ dst_handle = open(out, 'wb')
+
+
+ # Verify if patch is actually xdelta patch
+ headers = patch_handle.read(5)
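+    # VCDIFF magic: 0xD6 0xC3 0xC4 is "VCD" with the high bit set on each byte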
+ try:
+ assert headers[0] == 0xD6
+ assert headers[1] == 0xC3
+ assert headers[2] == 0xC4
+ except AssertionError:
+ print("Specified patch file is unlikely to be xdelta patch")
+ return
+
+ HDR_INDICATOR = headers[4]
+ COMPRESSOR_ID = HDR_INDICATOR & (1 << 0) != 0
+ CODE_TABLE = HDR_INDICATOR & (1 << 1) != 0
+ APP_HEADER = HDR_INDICATOR & (1 << 2) != 0
+ app_header_data = bytes()
+
+ if COMPRESSOR_ID or CODE_TABLE:
+ print("Compressor ID and codetable are yet not supported")
+ return
+
+ if APP_HEADER:
+ app_header_size = read_integer_stream(patch_handle)
+ app_header_data = patch_handle.read(app_header_size)
+
+ context = objects.Context(src_handle, dst_handle, BytesIO(), BytesIO(), BytesIO(), objects.AddressCache())
+
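+    # An xdelta3/VCDIFF file is a sequence of windows; decode them one at a time,
+    # each with its own data, instruction and address sections.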
+ win_number = 0
+ win_indicator = patch_handle.read(1)[0]
+ while win_indicator is not None:
+ context.acache = objects.AddressCache()
+ source_used = win_indicator & (1 << 0) != 0
+ target_used = win_indicator & (1 << 1) != 0
+ adler32_sum = win_indicator & (1 << 2) != 0
+
+ if source_used:
+ source_segment_length = read_integer_stream(patch_handle)
+ source_segment_position = read_integer_stream(patch_handle)
+ else:
+ source_segment_length = 0
+ source_segment_position = 0
+
+ context.cpy_len = source_segment_length
+ context.cpy_off = source_segment_position
+ context.source.seek(context.cpy_off or 0)
+ context.dec_pos = 0
+
+ # Parse delta
+ delta_encoding_length = read_integer_stream(patch_handle)
+
+ window_length = read_integer_stream(patch_handle)
+ context.target_buffer = bytearray()
+
+ delta_indicator = patch_handle.read(1)[0]
+
+ add_run_data_length = read_integer_stream(patch_handle)
+ instructions_length = read_integer_stream(patch_handle)
+ addresses_length = read_integer_stream(patch_handle)
+
+ parsed_sum = 0
+ if adler32_sum:
+ checksum = patch_handle.read(4)
+ parsed_sum = int.from_bytes(checksum, 'big')
+
+
+ context.data_sec = BytesIO(patch_handle.read(add_run_data_length))
+ context.inst_sec = BytesIO(patch_handle.read(instructions_length))
+ context.addr_sec = BytesIO(patch_handle.read(addresses_length))
+
+
+ current1 = objects.HalfInstruction()
+ current2 = objects.HalfInstruction()
+
+ while context.inst_sec.tell() < instructions_length or current1.type != objects.XD3_NOOP or current2.type != objects.XD3_NOOP:
+ if current1.type == objects.XD3_NOOP and current2.type == objects.XD3_NOOP:
+ ins = objects.CODE_TABLE[context.inst_sec.read(1)[0]]
+ current1.type = ins.type1
+ current2.type = ins.type2
+ current1.size = ins.size1
+ current2.size = ins.size2
+
+ if current1.type != objects.XD3_NOOP:
+ parse_halfinst(context, current1)
+ if current2.type != objects.XD3_NOOP:
+ parse_halfinst(context, current2)
+
+ while current1.type != objects.XD3_NOOP:
+ decode_halfinst(context, current1, speed_queue)
+
+ while current2.type != objects.XD3_NOOP:
+ decode_halfinst(context, current2, speed_queue)
+
+ if adler32_sum:
+ calculated_sum = adler32(context.target_buffer)
+ if parsed_sum != calculated_sum:
+ raise objects.ChecksumMissmatch
+
+ total_size = len(context.target_buffer)
+ chunk_size = 1024 * 1024
+ for i in range(math.ceil(total_size / chunk_size)):
+ chunk = context.target_buffer[i * chunk_size : min((i + 1) * chunk_size, total_size)]
+ context.target.write(chunk)
+ speed_queue.put((len(chunk), 0))
+
+ context.target.flush()
+
+ indicator = patch_handle.read(1)
+ if not len(indicator):
+ win_indicator = None
+ continue
+ win_indicator = indicator[0]
+ win_number += 1
+
+
+ dst_handle.flush()
+ src_handle.close()
+ patch_handle.close()
+ dst_handle.close()
+
+