Подтвердить что ты не робот

Быстрое преобразование между UInt и Int

У меня есть следующие методы

var photos = [MWPhoto] = [MWPhoto]()

// The question's broken code — intentionally does NOT compile:
// "Int is not convertible to UInt", because `count` is Int but the
// declared return type is UInt.
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {

    return self.photos.count
}

// The question's broken code — intentionally does NOT compile:
// "UInt is not convertible to Int", because Array's subscript takes an
// Int index while `index` is UInt.
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {

    return self.photos[index]
}

Однако для первого метода я получаю ошибку «Int is not convertible to UInt» (так как self.photos.count имеет тип Int),

а для второго — «UInt is not convertible to Int», поскольку сабскрипт self.photos[...] принимает в качестве индекса только Int.

Как я могу правильно преобразовать UInt в Int и обратно?

4b9b3361

Ответ 1

В первом случае тип возврата UInt, но вы возвращаете Int, поскольку count возвращает Int.

По сути, у UInt есть инициализаторы, принимающие аргументы различных типов значений — Int, CGFloat, Double и даже String — и возвращающие новое значение типа UInt.

  • UInt(8) // результат — значение 8 типа UInt
  • UInt(20.12) // результат — значение 20 типа UInt
  • UInt(Double(10)) // результат — значение 10 типа UInt
  • UInt("10") // результат — значение 10 типа UInt; обратите внимание: это failable-инициализатор, результатом может быть значение или nil

-

/// Reports how many photos the browser should display.
/// `Array.count` is an `Int`, so it is converted explicitly to the
/// `UInt` return type required by the delegate signature.
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {
    let photoCount = self.photos.count
    return UInt(photoCount)
}

Для второго метода: индекс массива ожидает значение Int, а вы передаёте UInt, поэтому создайте новое значение Int из UInt:

/// Returns the photo for the given browser index.
/// Array subscripts take `Int`, so the delegate's `UInt` index is
/// converted before subscripting.
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {
    let arrayIndex = Int(index)
    return self.photos[arrayIndex]
}

Ответ 2

// Start with an Int.
let someInt: Int = 8
someInt

// Int -> UInt via UInt's converting initializer (traps if the value is negative).
let someIntToUInt = UInt(someInt)
someIntToUInt

// Start with a UInt.
let someUInt: UInt = 10
someUInt

// UInt -> Int via Int's converting initializer (traps if the value exceeds Int.max).
let someUIntToInt = Int(someUInt)
someUIntToInt

Ответ 3

Если вам нужно беззнаковое представление отрицательного значения, используйте UInt(bitPattern:):

// Reinterpret a negative Int's two's-complement bits as an unsigned value.
let intVal = -1
// All bits set: 0xffffffffffffffff on a 64-bit platform, i.e. UInt.max.
let uintVal = UInt(bitPattern: intVal)

Ответ 4

Добавьте это вне класса:

extension UInt {
    /// SwiftExtensionKit
    /// Value-preserving conversion to `Int` (traps if the value exceeds `Int.max`).
    var toInt: Int {
        return Int(self)
    }
}

Затем просто вызовите:

self.photos[index].toInt

Ответ 5

Я был так расстроен загадочными параметрами методов Swift bitPattern: и truncatingBitPattern: и своей неспособностью запомнить, какой из них когда использовать, что создал следующий класс с большим количеством методов преобразования.

Я не обязательно рекомендую включать это в свою программу. Уверен, многие скажут, что Swift пытается защитить нас от нас самих, и что саботировать эти усилия глупо. Так что, возможно, вам стоит просто сохранить этот файл как своего рода шпаргалку, чтобы быстро определять, как выполнить преобразование, и при необходимости копировать параметры в свою программу.

Кстати, JDI означает "просто сделай это".

/// Class containing a large number of static methods to convert an Int to a UInt or vice-versa, and 
/// also to perform conversions between different bit sizes, for example UInt32 to UInt8.
///
/// Many of these "conversions" are trivial, and are only included for the sake of completeness.
///
/// A few of the conversions involving Int and UInt can give different results when run on 32-bit
/// and 64-bit systems. All of the conversion where the bit size of both the source and the target
/// are specified will always give the same result independent of platform.
///
/// Cheat sheet for the three initializer forms used below:
/// - `init(_:)`                     — value-preserving widening conversion (traps on overflow).
/// - `init(bitPattern:)`            — reinterprets the bits of a same-width value of opposite signedness.
/// - `init(truncatingBitPattern:)`  — keeps only the low-order bits of a wider value.
///
/// NOTE(review): this targets the Swift 3/4 API — `init(truncatingBitPattern:)` was renamed to
/// `init(truncatingIfNeeded:)` in Swift 4.2 (SE-0213 era), so these calls need updating when migrating.
public class JDI {

   // MARK: - To signed Int

   // To Int8
   public static func ToInt8(_ x : Int8) -> Int8 {
      return x
   }
   public static func ToInt8(_ x : Int32) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }
   public static func ToInt8(_ x : Int64) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }
   public static func ToInt8(_ x : Int) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }
   public static func ToInt8(_ x : UInt8) -> Int8 {
      return Int8(bitPattern: x)
   }
   public static func ToInt8(_ x : UInt32) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }
   public static func ToInt8(_ x : UInt64) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }
   public static func ToInt8(_ x : UInt) -> Int8 {
      return Int8(truncatingBitPattern: x)
   }

   // To Int32
   public static func ToInt32(_ x : Int8) -> Int32 {
      return Int32(x)
   }
   public static func ToInt32(_ x : Int32) -> Int32 {
      return x
   }
   public static func ToInt32(_ x : Int64) -> Int32 {
      return Int32(truncatingBitPattern: x)
   }
   public static func ToInt32(_ x : Int) -> Int32 {
      return Int32(truncatingBitPattern: x)
   }
   public static func ToInt32(_ x : UInt8) -> Int32 {
      return Int32(x)
   }
   public static func ToInt32(_ x : UInt32) -> Int32 {
      return Int32(bitPattern: x)
   }
   public static func ToInt32(_ x : UInt64) -> Int32 {
      return Int32(truncatingBitPattern: x)
   }
   public static func ToInt32(_ x : UInt) -> Int32 {
      return Int32(truncatingBitPattern: x)
   }

   // To Int64
   public static func ToInt64(_ x : Int8) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : Int32) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : Int64) -> Int64 {
      return x
   }
   public static func ToInt64(_ x : Int) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt8) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt32) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt64) -> Int64 {
      return Int64(bitPattern: x)
   }
   public static func ToInt64(_ x : UInt) -> Int64 {
      return Int64(bitPattern: UInt64(x))  // Does not extend high bit of 32-bit input
   }

   // To Int
   public static func ToInt(_ x : Int8) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : Int32) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : Int64) -> Int {
      return Int(truncatingBitPattern: x)
   }
   public static func ToInt(_ x : Int) -> Int {
      return x
   }
   public static func ToInt(_ x : UInt8) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : UInt32) -> Int {
      // Int is 32 bits wide here, so a same-width bit reinterpretation is needed
      // instead of the (trapping) value-preserving conversion below.
      if MemoryLayout<Int>.size == MemoryLayout<Int32>.size {
         return Int(Int32(bitPattern: x))  // For 32-bit systems, non-authorized interpretation
      }
      return Int(x)
   }
   public static func ToInt(_ x : UInt64) -> Int {
      return Int(truncatingBitPattern: x)
   }
   public static func ToInt(_ x : UInt) -> Int {
      return Int(bitPattern: x)
   }

   // MARK: - To unsigned Int

   // To UInt8
   public static func ToUInt8(_ x : Int8) -> UInt8 {
      return UInt8(bitPattern: x)
   }
   public static func ToUInt8(_ x : Int32) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }
   public static func ToUInt8(_ x : Int64) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }
   public static func ToUInt8(_ x : Int) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }
   public static func ToUInt8(_ x : UInt8) -> UInt8 {
      return x
   }
   public static func ToUInt8(_ x : UInt32) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }
   public static func ToUInt8(_ x : UInt64) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }
   public static func ToUInt8(_ x : UInt) -> UInt8 {
      return UInt8(truncatingBitPattern: x)
   }

   // To UInt32
   public static func ToUInt32(_ x : Int8) -> UInt32 {
      return UInt32(bitPattern: Int32(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt32(_ x : Int32) -> UInt32 {
      return UInt32(bitPattern: x)
   }
   public static func ToUInt32(_ x : Int64) -> UInt32 {
      return UInt32(truncatingBitPattern: x)
   }
   public static func ToUInt32(_ x : Int) -> UInt32 {
      return UInt32(truncatingBitPattern: x)
   }
   public static func ToUInt32(_ x : UInt8) -> UInt32 {
      return UInt32(x)
   }
   public static func ToUInt32(_ x : UInt32) -> UInt32 {
      return x
   }
   public static func ToUInt32(_ x : UInt64) -> UInt32 {
      return UInt32(truncatingBitPattern: x)
   }
   public static func ToUInt32(_ x : UInt) -> UInt32 {
      return UInt32(truncatingBitPattern: x)
   }

   // To UInt64
   public static func ToUInt64(_ x : Int8) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt64(_ x : Int32) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt64(_ x : Int64) -> UInt64 {
      return UInt64(bitPattern: x)
   }
   public static func ToUInt64(_ x : Int) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit if necessary, assume minus input significant
   }
   public static func ToUInt64(_ x : UInt8) -> UInt64 {
      return UInt64(x)
   }
   public static func ToUInt64(_ x : UInt32) -> UInt64 {
      return UInt64(x)
   }
   public static func ToUInt64(_ x : UInt64) -> UInt64 {
      return x
   }
   public static func ToUInt64(_ x : UInt) -> UInt64 {
      return UInt64(x)  // Does not extend high bit of 32-bit input
   }

   // To UInt
   public static func ToUInt(_ x : Int8) -> UInt {
      return UInt(bitPattern: Int(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt(_ x : Int32) -> UInt {
      return UInt(truncatingBitPattern: Int64(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt(_ x : Int64) -> UInt {
      return UInt(truncatingBitPattern: x)
   }
   public static func ToUInt(_ x : Int) -> UInt {
      return UInt(bitPattern: x)
   }
   public static func ToUInt(_ x : UInt8) -> UInt {
      return UInt(x)
   }
   public static func ToUInt(_ x : UInt32) -> UInt {
      return UInt(x)
   }
   public static func ToUInt(_ x : UInt64) -> UInt {
      return UInt(truncatingBitPattern: x)
   }
   public static func ToUInt(_ x : UInt) -> UInt {
      return x
   }
}

Вот несколько тестовых кодов:

   /// Exercises every JDI conversion overload with representative values (42, -13,
   /// and boundary bit patterns such as 0xf3 and `.max - 12`), asserting the expected
   /// truncation / sign-extension / reinterpretation results. Cases whose outcome
   /// depends on the platform word size are split with `#if arch(...)` into the
   /// 32-bit and 64-bit expectations.
   public func doTest() {

      // To Int8

      assert(JDI.ToInt8(42 as Int8) == 42)
      assert(JDI.ToInt8(-13 as Int8) == -13)

      assert(JDI.ToInt8(42 as Int32) == 42)
      assert(JDI.ToInt8(257 as Int32) == 1)

      assert(JDI.ToInt8(42 as Int64) == 42)
      assert(JDI.ToInt8(257 as Int64) == 1)

      assert(JDI.ToInt8(42 as Int) == 42)
      assert(JDI.ToInt8(257 as Int) == 1)

      assert(JDI.ToInt8(42 as UInt8) == 42)
      assert(JDI.ToInt8(0xf3 as UInt8) == -13)

      assert(JDI.ToInt8(42 as UInt32) == 42)
      assert(JDI.ToInt8(0xfffffff3 as UInt32) == -13)

      assert(JDI.ToInt8(42 as UInt64) == 42)
      assert(JDI.ToInt8(UInt64.max - 12) == -13)

      assert(JDI.ToInt8(42 as UInt) == 42)
      assert(JDI.ToInt8(UInt.max - 12) == -13)

      // To Int32

      assert(JDI.ToInt32(42 as Int8) == 42)
      assert(JDI.ToInt32(-13 as Int8) == -13)

      assert(JDI.ToInt32(42 as Int32) == 42)
      assert(JDI.ToInt32(-13 as Int32) == -13)

      assert(JDI.ToInt32(42 as Int64) == 42)
      // Truncation wraps one-below-Int32.min to Int32.max.
      assert(JDI.ToInt32(Int64(Int32.min) - 1) == Int32.max)

      assert(JDI.ToInt32(42 as Int) == 42)
      assert(JDI.ToInt32(-13 as Int) == -13)

      assert(JDI.ToInt32(42 as UInt8) == 42)
      assert(JDI.ToInt32(0xf3 as UInt8) == 243)

      assert(JDI.ToInt32(42 as UInt32) == 42)
      assert(JDI.ToInt32(0xfffffff3 as UInt32) == -13)

      assert(JDI.ToInt32(42 as UInt64) == 42)
      assert(JDI.ToInt32(UInt64.max - 12) == -13)

      assert(JDI.ToInt32(42 as UInt) == 42)
      assert(JDI.ToInt32(UInt.max - 12) == -13)

      // To Int64

      assert(JDI.ToInt64(42 as Int8) == 42)
      assert(JDI.ToInt64(-13 as Int8) == -13)

      assert(JDI.ToInt64(42 as Int32) == 42)
      assert(JDI.ToInt64(-13 as Int32) == -13)

      assert(JDI.ToInt64(42 as Int64) == 42)
      assert(JDI.ToInt64(-13 as Int64) == -13)

      assert(JDI.ToInt64(42 as Int) == 42)
      assert(JDI.ToInt64(-13 as Int) == -13)

      assert(JDI.ToInt64(42 as UInt8) == 42)
      assert(JDI.ToInt64(0xf3 as UInt8) == 243)

      assert(JDI.ToInt64(42 as UInt32) == 42)
      assert(JDI.ToInt64(0xfffffff3 as UInt32) == 4294967283)

      assert(JDI.ToInt64(42 as UInt64) == 42)
      assert(JDI.ToInt64(UInt64.max - 12) == -13)

      assert(JDI.ToInt64(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToInt64(UInt.max - 12) == 4294967283)  // For 32-bit systems
      #else
         assert(JDI.ToInt64(UInt.max - 12) == -13)  // For 64-bit systems
      #endif

      // To Int

      assert(JDI.ToInt(42 as Int8) == 42)
      assert(JDI.ToInt(-13 as Int8) == -13)

      assert(JDI.ToInt(42 as Int32) == 42)
      assert(JDI.ToInt(-13 as Int32) == -13)

      assert(JDI.ToInt(42 as Int64) == 42)
      assert(JDI.ToInt(-13 as Int64) == -13)

      assert(JDI.ToInt(42 as Int) == 42)
      assert(JDI.ToInt(-13 as Int) == -13)

      assert(JDI.ToInt(42 as UInt8) == 42)
      assert(JDI.ToInt(0xf3 as UInt8) == 243)

      assert(JDI.ToInt(42 as UInt32) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToInt(0xfffffff3 as UInt32) == -13)  // For 32-bit systems
      #else
         assert(JDI.ToInt(0xfffffff3 as UInt32) == 4294967283)  // For 64-bit systems
      #endif

      assert(JDI.ToInt(42 as UInt64) == 42)
      assert(JDI.ToInt(UInt64.max - 12) == -13)

      assert(JDI.ToInt(42 as UInt) == 42)
      assert(JDI.ToInt(UInt.max - 12) == -13)

      // To UInt8

      assert(JDI.ToUInt8(42 as Int8) == 42)
      assert(JDI.ToUInt8(-13 as Int8) == 0xf3)

      assert(JDI.ToUInt8(42 as Int32) == 42)
      assert(JDI.ToUInt8(-13 as Int32) == 0xf3)

      assert(JDI.ToUInt8(42 as Int64) == 42)
      assert(JDI.ToUInt8(-13 as Int64) == 0xf3)
      assert(JDI.ToUInt8(Int64.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as Int) == 42)
      assert(JDI.ToUInt8(-13 as Int) == 0xf3)
      assert(JDI.ToUInt8(Int.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt8) == 42)
      assert(JDI.ToUInt8(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt32) == 42)
      assert(JDI.ToUInt8(0xfffffff3 as UInt32) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt64) == 42)
      assert(JDI.ToUInt8(UInt64.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt) == 42)
      assert(JDI.ToUInt8(UInt.max - 12) == 0xf3)

      // To UInt32

      assert(JDI.ToUInt32(42 as Int8) == 42)
      assert(JDI.ToUInt32(-13 as Int8) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int32) == 42)
      assert(JDI.ToUInt32(-13 as Int32) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int64) == 42)
      assert(JDI.ToUInt32(-13 as Int64) == 0xfffffff3)
      assert(JDI.ToUInt32(Int64.max - 12) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int) == 42)
      assert(JDI.ToUInt32(-13 as Int) == 0xfffffff3)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt32(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt32(Int.max - 12) == 0xfffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt32(42 as UInt8) == 42)
      assert(JDI.ToUInt32(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt32(42 as UInt32) == 42)
      assert(JDI.ToUInt32(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as UInt64) == 42)
      assert(JDI.ToUInt32(UInt64.max - 12) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as UInt) == 42)
      assert(JDI.ToUInt32(UInt.max - 12) == 0xfffffff3)

      // To UInt64

      assert(JDI.ToUInt64(42 as Int8) == 42)
      assert(JDI.ToUInt64(-13 as Int8) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as Int32) == 42)
      assert(JDI.ToUInt64(-13 as Int32) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as Int64) == 42)
      assert(JDI.ToUInt64(-13 as Int64) == 0xfffffffffffffff3)
      assert(JDI.ToUInt64(Int64.max - 12) == (UInt64.max >> 1) - 12)

      assert(JDI.ToUInt64(42 as Int) == 42)
      assert(JDI.ToUInt64(-13 as Int) == 0xfffffffffffffff3)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt64(42 as UInt8) == 42)
      assert(JDI.ToUInt64(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt64(42 as UInt32) == 42)
      assert(JDI.ToUInt64(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt64(42 as UInt64) == 42)
      assert(JDI.ToUInt64(UInt64.max - 12) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      // To UInt

      assert(JDI.ToUInt(42 as Int8) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int8) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(-13 as Int8) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as Int32) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int32) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(-13 as Int32) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as Int64) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int64) == 0xfffffff3)  // For 32-bit systems
         assert(JDI.ToUInt(Int64.max - 12) == 0xfffffff3)
      #else
         assert(JDI.ToUInt(-13 as Int64) == 0xfffffffffffffff3)  // For 64-bit systems
         assert(JDI.ToUInt(Int64.max - 12) == 0x7ffffffffffffff3)
      #endif

      assert(JDI.ToUInt(42 as Int) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(Int.max - 12) == 0x7ffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as UInt8) == 42)
      assert(JDI.ToUInt(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt(42 as UInt32) == 42)
      assert(JDI.ToUInt(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt(42 as UInt64) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(UInt.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(UInt.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      print("\nTesting JDI complete.\n")
   }